blob: d172bbd1240b1057238c1caaf7250de3a0cd7015 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
Shuzhen Wangf6890e02016-08-12 14:28:54 -070045#include <time.h>
Thierry Strudel3d639192016-09-09 11:52:26 -070046#include <sync/sync.h>
47#include "gralloc_priv.h"
Thierry Strudele80ad7c2016-12-06 10:16:27 -080048#include <map>
Thierry Strudel3d639192016-09-09 11:52:26 -070049
50// Display dependencies
51#include "qdMetaData.h"
52
53// Camera dependencies
54#include "android/QCamera3External.h"
55#include "util/QCameraFlash.h"
56#include "QCamera3HWI.h"
57#include "QCamera3VendorTags.h"
58#include "QCameraTrace.h"
59
Chien-Yu Chene687bd02016-12-07 18:30:26 -080060#include "HdrPlusClientUtils.h"
61
Thierry Strudel3d639192016-09-09 11:52:26 -070062extern "C" {
63#include "mm_camera_dbg.h"
64}
Shuzhen Wangfb961e52016-11-28 11:48:02 -080065#include "cam_cond.h"
Thierry Strudel3d639192016-09-09 11:52:26 -070066
67using namespace android;
68
69namespace qcamera {
70
71#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
72
73#define EMPTY_PIPELINE_DELAY 2
74#define PARTIAL_RESULT_COUNT 2
75#define FRAME_SKIP_DELAY 0
76
77#define MAX_VALUE_8BIT ((1<<8)-1)
78#define MAX_VALUE_10BIT ((1<<10)-1)
79#define MAX_VALUE_12BIT ((1<<12)-1)
80
81#define VIDEO_4K_WIDTH 3840
82#define VIDEO_4K_HEIGHT 2160
83
84#define MAX_EIS_WIDTH 1920
85#define MAX_EIS_HEIGHT 1080
86
87#define MAX_RAW_STREAMS 1
88#define MAX_STALLING_STREAMS 1
89#define MAX_PROCESSED_STREAMS 3
90/* Batch mode is enabled only if FPS set is equal to or greater than this */
91#define MIN_FPS_FOR_BATCH_MODE (120)
92#define PREVIEW_FPS_FOR_HFR (30)
93#define DEFAULT_VIDEO_FPS (30.0)
Thierry Strudele80ad7c2016-12-06 10:16:27 -080094#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
Thierry Strudel3d639192016-09-09 11:52:26 -070095#define MAX_HFR_BATCH_SIZE (8)
96#define REGIONS_TUPLE_COUNT 5
97#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
Thierry Strudel3d639192016-09-09 11:52:26 -070098// Set a threshold for detection of missing buffers //seconds
99#define MISSING_REQUEST_BUF_TIMEOUT 3
Chien-Yu Chene687bd02016-12-07 18:30:26 -0800100#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
Thierry Strudel3d639192016-09-09 11:52:26 -0700101#define FLUSH_TIMEOUT 3
102#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
103
104#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
105 CAM_QCOM_FEATURE_CROP |\
106 CAM_QCOM_FEATURE_ROTATION |\
107 CAM_QCOM_FEATURE_SHARPNESS |\
108 CAM_QCOM_FEATURE_SCALE |\
109 CAM_QCOM_FEATURE_CAC |\
110 CAM_QCOM_FEATURE_CDS )
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700111/* Per configuration size for static metadata length*/
112#define PER_CONFIGURATION_SIZE_3 (3)
Thierry Strudel3d639192016-09-09 11:52:26 -0700113
114#define TIMEOUT_NEVER -1
115
Thierry Strudel04e026f2016-10-10 11:27:36 -0700116/* Face landmarks indices */
117#define LEFT_EYE_X 0
118#define LEFT_EYE_Y 1
119#define RIGHT_EYE_X 2
120#define RIGHT_EYE_Y 3
121#define MOUTH_X 4
122#define MOUTH_Y 5
123#define TOTAL_LANDMARK_INDICES 6
124
// Per-camera capability table, indexed by camera id (see usage in the
// constructor: gCamCapability[cameraId]).
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata per camera.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-wide lock shared with other HAL translation units (defined
// elsewhere); used here to serialize the dual-camera unlink sequence.
extern pthread_mutex_t gCamLock;
// HAL log verbosity; volatile because it may be updated at runtime.
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// String-to-enum table for the CDS mode system property value.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On",   CAM_CDS_MODE_ON},
    {"Off",  CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
// Vendor video-HDR tag values <-> backend video-HDR modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};

// Vendor IR tag values <-> backend IR modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    { QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF },
    { QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON },
    { QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO }
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700151
// Android control.effectMode <-> backend effect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,        CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// Android control.awbMode <-> backend white-balance modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// Android control.sceneMode <-> backend scene modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    // STEADYPHOTO has no direct backend equivalent; mapped to anti-shake.
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// Android control.afMode <-> backend focus modes.
// Note: ANDROID_CONTROL_AF_MODE_OFF appears twice so that both
// CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED translate back to AF_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

// Android colorCorrection.aberrationMode <-> backend CAC modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// Android control.aeAntibandingMode <-> backend antibanding modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};
232
// Android control.aeMode <-> backend flash modes (AE mode implies a flash
// policy: plain ON/OFF AE modes keep the flash off).
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// Android flash.mode <-> backend flash modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// Android statistics.faceDetectMode <-> backend face-detect modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

// Android lens.info.focusDistanceCalibration <-> backend calibration levels.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// Android lens.state <-> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

// Advertised JPEG thumbnail sizes as flat (width, height) pairs; the leading
// (0, 0) entry lets clients request that no thumbnail be generated.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// Android sensor.testPatternMode <-> backend test-pattern modes.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,         CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,  CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,         CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,     CAM_TEST_PATTERN_CUSTOM1},
};
296
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

// Requested frame rate <-> backend HFR (high frame rate) mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60,  CAM_HFR_MODE_60FPS},
    { 90,  CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

// Vendor instant-AEC tag values <-> backend AEC convergence types.
const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE,     CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE,       CAM_AEC_FAST_CONVERGENCE},
};
// camera3_device_ops vtable handed to the framework; entries that HAL3
// no longer uses (register_stream_buffers, get_metadata_vendor_tag_ops)
// are deliberately NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// (0xDEADBEEF acts as a "no valid session" sentinel per camera slot)
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
356
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to safe defaults, publishes the camera3_device ops
 *              table, creates synchronization primitives, and reads the
 *              persist.camera.* debug/tuning properties.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : framework callback table, stored for later notification
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mPerfLockMgr(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
    // Device API version depends on the build: 3.4 unless USE_HAL_3_3 forces
    // the older 3.3 contract.
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    // Note: software TNR for preview defaults to ON ("1"), unlike the other
    // TNR switches.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    //Load and read GPU library.
    // The GPU pixel alignment (if the Adreno utils library is present)
    // overrides the default stride padding of CAM_PAD_TO_32.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }

    m60HzZone = is60HzZone();
}
502
503/*===========================================================================
504 * FUNCTION : ~QCamera3HardwareInterface
505 *
506 * DESCRIPTION: destructor of QCamera3HardwareInterface
507 *
508 * PARAMETERS : none
509 *
510 * RETURN : none
511 *==========================================================================*/
512QCamera3HardwareInterface::~QCamera3HardwareInterface()
513{
514 LOGD("E");
515
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800516 int32_t rc = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -0700517
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800518 // Disable power hint and enable the perf lock for close camera
519 mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
520 mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);
521
522 // unlink of dualcam during close camera
523 if (mIsDeviceLinked) {
524 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
525 &m_pDualCamCmdPtr->bundle_info;
526 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
527 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
528 pthread_mutex_lock(&gCamLock);
529
530 if (mIsMainCamera == 1) {
531 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
532 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
533 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
534 // related session id should be session id of linked session
535 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
536 } else {
537 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
538 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
539 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
540 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
541 }
542 pthread_mutex_unlock(&gCamLock);
543
544 rc = mCameraHandle->ops->set_dual_cam_cmd(
545 mCameraHandle->camera_handle);
546 if (rc < 0) {
547 LOGE("Dualcam: Unlink failed, but still proceed to close");
548 }
549 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700550
551 /* We need to stop all streams before deleting any stream */
552 if (mRawDumpChannel) {
553 mRawDumpChannel->stop();
554 }
555
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700556 if (mHdrPlusRawSrcChannel) {
557 mHdrPlusRawSrcChannel->stop();
558 }
559
Thierry Strudel3d639192016-09-09 11:52:26 -0700560 // NOTE: 'camera3_stream_t *' objects are already freed at
561 // this stage by the framework
562 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
563 it != mStreamInfo.end(); it++) {
564 QCamera3ProcessingChannel *channel = (*it)->channel;
565 if (channel) {
566 channel->stop();
567 }
568 }
569 if (mSupportChannel)
570 mSupportChannel->stop();
571
572 if (mAnalysisChannel) {
573 mAnalysisChannel->stop();
574 }
575 if (mMetadataChannel) {
576 mMetadataChannel->stop();
577 }
578 if (mChannelHandle) {
579 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
580 mChannelHandle);
581 LOGD("stopping channel %d", mChannelHandle);
582 }
583
584 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
585 it != mStreamInfo.end(); it++) {
586 QCamera3ProcessingChannel *channel = (*it)->channel;
587 if (channel)
588 delete channel;
589 free (*it);
590 }
591 if (mSupportChannel) {
592 delete mSupportChannel;
593 mSupportChannel = NULL;
594 }
595
596 if (mAnalysisChannel) {
597 delete mAnalysisChannel;
598 mAnalysisChannel = NULL;
599 }
600 if (mRawDumpChannel) {
601 delete mRawDumpChannel;
602 mRawDumpChannel = NULL;
603 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700604 if (mHdrPlusRawSrcChannel) {
605 delete mHdrPlusRawSrcChannel;
606 mHdrPlusRawSrcChannel = NULL;
607 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700608 if (mDummyBatchChannel) {
609 delete mDummyBatchChannel;
610 mDummyBatchChannel = NULL;
611 }
612
613 mPictureChannel = NULL;
614
615 if (mMetadataChannel) {
616 delete mMetadataChannel;
617 mMetadataChannel = NULL;
618 }
619
620 /* Clean up all channels */
621 if (mCameraInitialized) {
622 if(!mFirstConfiguration){
623 //send the last unconfigure
624 cam_stream_size_info_t stream_config_info;
625 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
626 stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
627 stream_config_info.buffer_info.max_buffers =
628 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700629 clear_metadata_buffer(mParameters);
Thierry Strudel3d639192016-09-09 11:52:26 -0700630 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
631 stream_config_info);
632 int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
633 if (rc < 0) {
634 LOGE("set_parms failed for unconfigure");
635 }
636 }
637 deinitParameters();
638 }
639
640 if (mChannelHandle) {
641 mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
642 mChannelHandle);
643 LOGH("deleting channel %d", mChannelHandle);
644 mChannelHandle = 0;
645 }
646
647 if (mState != CLOSED)
648 closeCamera();
649
650 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
651 req.mPendingBufferList.clear();
652 }
653 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Thierry Strudel3d639192016-09-09 11:52:26 -0700654 for (pendingRequestIterator i = mPendingRequestsList.begin();
655 i != mPendingRequestsList.end();) {
656 i = erasePendingRequest(i);
657 }
658 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
659 if (mDefaultMetadata[i])
660 free_camera_metadata(mDefaultMetadata[i]);
661
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800662 mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700663
664 pthread_cond_destroy(&mRequestCond);
665
666 pthread_cond_destroy(&mBuffersCond);
667
668 pthread_mutex_destroy(&mMutex);
669 LOGD("X");
670}
671
672/*===========================================================================
673 * FUNCTION : erasePendingRequest
674 *
675 * DESCRIPTION: function to erase a desired pending request after freeing any
676 * allocated memory
677 *
678 * PARAMETERS :
679 * @i : iterator pointing to pending request to be erased
680 *
681 * RETURN : iterator pointing to the next request
682 *==========================================================================*/
683QCamera3HardwareInterface::pendingRequestIterator
684 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
685{
686 if (i->input_buffer != NULL) {
687 free(i->input_buffer);
688 i->input_buffer = NULL;
689 }
690 if (i->settings != NULL)
691 free_camera_metadata((camera_metadata_t*)i->settings);
692 return mPendingRequestsList.erase(i);
693}
694
695/*===========================================================================
696 * FUNCTION : camEvtHandle
697 *
698 * DESCRIPTION: Function registered to mm-camera-interface to handle events
699 *
700 * PARAMETERS :
701 * @camera_handle : interface layer camera handle
702 * @evt : ptr to event
703 * @user_data : user data ptr
704 *
705 * RETURN : none
706 *==========================================================================*/
707void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
708 mm_camera_event_t *evt,
709 void *user_data)
710{
711 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
712 if (obj && evt) {
713 switch(evt->server_event_type) {
714 case CAM_EVENT_TYPE_DAEMON_DIED:
715 pthread_mutex_lock(&obj->mMutex);
716 obj->mState = ERROR;
717 pthread_mutex_unlock(&obj->mMutex);
718 LOGE("Fatal, camera daemon died");
719 break;
720
721 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
722 LOGD("HAL got request pull from Daemon");
723 pthread_mutex_lock(&obj->mMutex);
724 obj->mWokenUpByDaemon = true;
725 obj->unblockRequestIfNecessary();
726 pthread_mutex_unlock(&obj->mMutex);
727 break;
728
729 default:
730 LOGW("Warning: Unhandled event %d",
731 evt->server_event_type);
732 break;
733 }
734 } else {
735 LOGE("NULL user_data/evt");
736 }
737}
738
739/*===========================================================================
740 * FUNCTION : openCamera
741 *
742 * DESCRIPTION: open camera
743 *
744 * PARAMETERS :
745 * @hw_device : double ptr for camera device struct
746 *
747 * RETURN : int32_t type of status
748 * NO_ERROR -- success
749 * none-zero failure code
750 *==========================================================================*/
751int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
752{
753 int rc = 0;
754 if (mState != CLOSED) {
755 *hw_device = NULL;
756 return PERMISSION_DENIED;
757 }
758
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800759 mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700760 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
761 mCameraId);
762
763 rc = openCamera();
764 if (rc == 0) {
765 *hw_device = &mCameraDevice.common;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800766 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -0700767 *hw_device = NULL;
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800768 }
Thierry Strudel3d639192016-09-09 11:52:26 -0700769
Thierry Strudel3d639192016-09-09 11:52:26 -0700770 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
771 mCameraId, rc);
772
773 if (rc == NO_ERROR) {
774 mState = OPENED;
775 }
776 return rc;
777}
778
779/*===========================================================================
780 * FUNCTION : openCamera
781 *
782 * DESCRIPTION: open camera
783 *
784 * PARAMETERS : none
785 *
786 * RETURN : int32_t type of status
787 * NO_ERROR -- success
 * non-zero failure code
789 *==========================================================================*/
790int QCamera3HardwareInterface::openCamera()
791{
792 int rc = 0;
793 char value[PROPERTY_VALUE_MAX];
794
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800795 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);
Thierry Strudel3d639192016-09-09 11:52:26 -0700796 if (mCameraHandle) {
797 LOGE("Failure: Camera already opened");
798 return ALREADY_EXISTS;
799 }
800
801 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
802 if (rc < 0) {
803 LOGE("Failed to reserve flash for camera id: %d",
804 mCameraId);
805 return UNKNOWN_ERROR;
806 }
807
808 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
809 if (rc) {
810 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
811 return rc;
812 }
813
814 if (!mCameraHandle) {
815 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
816 return -ENODEV;
817 }
818
819 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
820 camEvtHandle, (void *)this);
821
822 if (rc < 0) {
823 LOGE("Error, failed to register event callback");
824 /* Not closing camera here since it is already handled in destructor */
825 return FAILED_TRANSACTION;
826 }
827
828 mExifParams.debug_params =
829 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
830 if (mExifParams.debug_params) {
831 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
832 } else {
833 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
834 return NO_MEMORY;
835 }
836 mFirstConfiguration = true;
837
838 //Notify display HAL that a camera session is active.
839 //But avoid calling the same during bootup because camera service might open/close
840 //cameras at boot time during its initialization and display service will also internally
841 //wait for camera service to initialize first while calling this display API, resulting in a
842 //deadlock situation. Since boot time camera open/close calls are made only to fetch
843 //capabilities, no need of this display bw optimization.
844 //Use "service.bootanim.exit" property to know boot status.
845 property_get("service.bootanim.exit", value, "0");
846 if (atoi(value) == 1) {
847 pthread_mutex_lock(&gCamLock);
848 if (gNumCameraSessions++ == 0) {
849 setCameraLaunchStatus(true);
850 }
851 pthread_mutex_unlock(&gCamLock);
852 }
853
854 //fill the session id needed while linking dual cam
855 pthread_mutex_lock(&gCamLock);
856 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
857 &sessionId[mCameraId]);
858 pthread_mutex_unlock(&gCamLock);
859
860 if (rc < 0) {
861 LOGE("Error, failed to get sessiion id");
862 return UNKNOWN_ERROR;
863 } else {
864 //Allocate related cam sync buffer
865 //this is needed for the payload that goes along with bundling cmd for related
866 //camera use cases
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700867 m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
868 rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -0700869 if(rc != OK) {
870 rc = NO_MEMORY;
871 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
872 return NO_MEMORY;
873 }
874
875 //Map memory for related cam sync buffer
876 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700877 CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
878 m_pDualCamCmdHeap->getFd(0),
879 sizeof(cam_dual_camera_cmd_info_t),
880 m_pDualCamCmdHeap->getPtr(0));
Thierry Strudel3d639192016-09-09 11:52:26 -0700881 if(rc < 0) {
882 LOGE("Dualcam: failed to map Related cam sync buffer");
883 rc = FAILED_TRANSACTION;
884 return NO_MEMORY;
885 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700886 m_pDualCamCmdPtr =
887 (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
Thierry Strudel3d639192016-09-09 11:52:26 -0700888 }
889
890 LOGH("mCameraId=%d",mCameraId);
891
892 return NO_ERROR;
893}
894
895/*===========================================================================
896 * FUNCTION : closeCamera
897 *
898 * DESCRIPTION: close camera
899 *
900 * PARAMETERS : none
901 *
902 * RETURN : int32_t type of status
903 * NO_ERROR -- success
 * non-zero failure code
905 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);

    // NOTE(review): assumes mCameraHandle is non-NULL here (the state machine
    // only allows close after a successful open) -- confirm against callers.
    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    // Free the dual-camera command heap allocated in openCamera().
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    // Disconnect from HDR+ client.
    if (mHdrPlusClient != nullptr) {
        mHdrPlusClient->disconnect();
        mHdrPlusClient = nullptr;
    }

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Free the 3A debug exif params allocated in openCamera().
    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Release of the flash unit is best-effort; failure is only logged.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
         mCameraId, rc);
    return rc;
}
964
965/*===========================================================================
966 * FUNCTION : initialize
967 *
968 * DESCRIPTION: Initialize frameworks callback functions
969 *
970 * PARAMETERS :
971 * @callback_ops : callback function to frameworks
972 *
973 * RETURN :
974 *
975 *==========================================================================*/
976int QCamera3HardwareInterface::initialize(
977 const struct camera3_callback_ops *callback_ops)
978{
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800979 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
Thierry Strudel3d639192016-09-09 11:52:26 -0700980 int rc;
981
982 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
983 pthread_mutex_lock(&mMutex);
984
985 // Validate current state
986 switch (mState) {
987 case OPENED:
988 /* valid state */
989 break;
990 default:
991 LOGE("Invalid state %d", mState);
992 rc = -ENODEV;
993 goto err1;
994 }
995
996 rc = initParameters();
997 if (rc < 0) {
998 LOGE("initParamters failed %d", rc);
999 goto err1;
1000 }
1001 mCallbackOps = callback_ops;
1002
1003 mChannelHandle = mCameraHandle->ops->add_channel(
1004 mCameraHandle->camera_handle, NULL, NULL, this);
1005 if (mChannelHandle == 0) {
1006 LOGE("add_channel failed");
1007 rc = -ENOMEM;
1008 pthread_mutex_unlock(&mMutex);
1009 return rc;
1010 }
1011
1012 pthread_mutex_unlock(&mMutex);
1013 mCameraInitialized = true;
1014 mState = INITIALIZED;
1015 LOGI("X");
1016 return 0;
1017
1018err1:
1019 pthread_mutex_unlock(&mMutex);
1020 return rc;
1021}
1022
1023/*===========================================================================
1024 * FUNCTION : validateStreamDimensions
1025 *
1026 * DESCRIPTION: Check if the configuration requested are those advertised
1027 *
1028 * PARAMETERS :
1029 * @stream_list : streams to be configured
1030 *
1031 * RETURN :
1032 *
1033 *==========================================================================*/
1034int QCamera3HardwareInterface::validateStreamDimensions(
1035 camera3_stream_configuration_t *streamList)
1036{
1037 int rc = NO_ERROR;
1038 size_t count = 0;
1039
1040 camera3_stream_t *inputStream = NULL;
1041 /*
1042 * Loop through all streams to find input stream if it exists*
1043 */
1044 for (size_t i = 0; i< streamList->num_streams; i++) {
1045 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1046 if (inputStream != NULL) {
1047 LOGE("Error, Multiple input streams requested");
1048 return -EINVAL;
1049 }
1050 inputStream = streamList->streams[i];
1051 }
1052 }
1053 /*
1054 * Loop through all streams requested in configuration
1055 * Check if unsupported sizes have been requested on any of them
1056 */
1057 for (size_t j = 0; j < streamList->num_streams; j++) {
1058 bool sizeFound = false;
1059 camera3_stream_t *newStream = streamList->streams[j];
1060
1061 uint32_t rotatedHeight = newStream->height;
1062 uint32_t rotatedWidth = newStream->width;
1063 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1064 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1065 rotatedHeight = newStream->width;
1066 rotatedWidth = newStream->height;
1067 }
1068
1069 /*
1070 * Sizes are different for each type of stream format check against
1071 * appropriate table.
1072 */
1073 switch (newStream->format) {
1074 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1075 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1076 case HAL_PIXEL_FORMAT_RAW10:
1077 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1078 for (size_t i = 0; i < count; i++) {
1079 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1080 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1081 sizeFound = true;
1082 break;
1083 }
1084 }
1085 break;
1086 case HAL_PIXEL_FORMAT_BLOB:
1087 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1088 /* Verify set size against generated sizes table */
1089 for (size_t i = 0; i < count; i++) {
1090 if (((int32_t)rotatedWidth ==
1091 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1092 ((int32_t)rotatedHeight ==
1093 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1094 sizeFound = true;
1095 break;
1096 }
1097 }
1098 break;
1099 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1100 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1101 default:
1102 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1103 || newStream->stream_type == CAMERA3_STREAM_INPUT
1104 || IS_USAGE_ZSL(newStream->usage)) {
1105 if (((int32_t)rotatedWidth ==
1106 gCamCapability[mCameraId]->active_array_size.width) &&
1107 ((int32_t)rotatedHeight ==
1108 gCamCapability[mCameraId]->active_array_size.height)) {
1109 sizeFound = true;
1110 break;
1111 }
1112 /* We could potentially break here to enforce ZSL stream
1113 * set from frameworks always is full active array size
1114 * but it is not clear from the spc if framework will always
1115 * follow that, also we have logic to override to full array
1116 * size, so keeping the logic lenient at the moment
1117 */
1118 }
1119 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1120 MAX_SIZES_CNT);
1121 for (size_t i = 0; i < count; i++) {
1122 if (((int32_t)rotatedWidth ==
1123 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1124 ((int32_t)rotatedHeight ==
1125 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1126 sizeFound = true;
1127 break;
1128 }
1129 }
1130 break;
1131 } /* End of switch(newStream->format) */
1132
1133 /* We error out even if a single stream has unsupported size set */
1134 if (!sizeFound) {
1135 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1136 rotatedWidth, rotatedHeight, newStream->format,
1137 gCamCapability[mCameraId]->active_array_size.width,
1138 gCamCapability[mCameraId]->active_array_size.height);
1139 rc = -EINVAL;
1140 break;
1141 }
1142 } /* End of for each stream */
1143 return rc;
1144}
1145
1146/*==============================================================================
1147 * FUNCTION : isSupportChannelNeeded
1148 *
1149 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1150 *
1151 * PARAMETERS :
1152 * @stream_list : streams to be configured
1153 * @stream_config_info : the config info for streams to be configured
1154 *
 * RETURN : Boolean true/false decision
1156 *
1157 *==========================================================================*/
1158bool QCamera3HardwareInterface::isSupportChannelNeeded(
1159 camera3_stream_configuration_t *streamList,
1160 cam_stream_size_info_t stream_config_info)
1161{
1162 uint32_t i;
1163 bool pprocRequested = false;
1164 /* Check for conditions where PProc pipeline does not have any streams*/
1165 for (i = 0; i < stream_config_info.num_streams; i++) {
1166 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1167 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1168 pprocRequested = true;
1169 break;
1170 }
1171 }
1172
1173 if (pprocRequested == false )
1174 return true;
1175
1176 /* Dummy stream needed if only raw or jpeg streams present */
1177 for (i = 0; i < streamList->num_streams; i++) {
1178 switch(streamList->streams[i]->format) {
1179 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1180 case HAL_PIXEL_FORMAT_RAW10:
1181 case HAL_PIXEL_FORMAT_RAW16:
1182 case HAL_PIXEL_FORMAT_BLOB:
1183 break;
1184 default:
1185 return false;
1186 }
1187 }
1188 return true;
1189}
1190
1191/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001192 * FUNCTION : sensor_mode_info
Thierry Strudel3d639192016-09-09 11:52:26 -07001193 *
 * DESCRIPTION: Get sensor mode information based on current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001195 *
1196 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001197 * @sensor_mode_info : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001198 *
1199 * RETURN : int32_t type of status
1200 * NO_ERROR -- success
 * non-zero failure code
1202 *
1203 *==========================================================================*/
int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
{
    int32_t rc = NO_ERROR;

    // Compute the bounding dimension over all configured streams; the
    // backend selects a sensor mode large enough to serve every stream.
    cam_dimension_t max_dim = {0, 0};
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
            max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
        if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
            max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
    }

    clear_metadata_buffer(mParameters);

    // Push the max dimension first so the sensor-mode query below reflects
    // the current stream configuration.
    rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
            max_dim);
    if (rc != NO_ERROR) {
        LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
        return rc;
    }

    // Query the resulting sensor mode information back from the backend.
    clear_metadata_buffer(mParameters);
    ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);

    rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc != NO_ERROR) {
        LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
        return rc;
    }

    // Copy the result into the caller-supplied output struct.
    READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
    LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u", __FUNCTION__,
        sensorModeInfo.active_array_size.width, sensorModeInfo.active_array_size.height,
        sensorModeInfo.pixel_array_size.width, sensorModeInfo.pixel_array_size.height,
        sensorModeInfo.op_pixel_clk);

    return rc;
}
1249
1250/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001251 * FUNCTION : addToPPFeatureMask
1252 *
1253 * DESCRIPTION: add additional features to pp feature mask based on
1254 * stream type and usecase
1255 *
1256 * PARAMETERS :
1257 * @stream_format : stream type for feature mask
1258 * @stream_idx : stream idx within postprocess_mask list to change
1259 *
1260 * RETURN : NULL
1261 *
1262 *==========================================================================*/
void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
        uint32_t stream_idx)
{
    char feature_mask_value[PROPERTY_VALUE_MAX];
    cam_feature_mask_t feature_mask;
    int args_converted;
    int property_len;

    /* Get feature mask from property */
#ifdef _LE_CAMERA_
    // LE builds default the property to SW TNR enabled.
    char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
    snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, swtnr_feature_mask_value);
#else
    property_len = property_get("persist.camera.hal3.feature",
            feature_mask_value, "0");
#endif
    // The property value may be hex ("0x...") or decimal.
    if ((property_len > 2) && (feature_mask_value[0] == '0') &&
            (feature_mask_value[1] == 'x')) {
        args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
    } else {
        args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
    }
    if (1 != args_converted) {
        // Unparseable property: leave the stream's mask untouched.
        feature_mask = 0;
        LOGE("Wrong feature mask %s", feature_mask_value);
        return;
    }

    switch (stream_format) {
    case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
        /* Add LLVD to pp feature mask only if video hint is enabled */
        // SW TNR takes priority over LLVD when both bits are set.
        if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QTI_FEATURE_SW_TNR;
            LOGH("Added SW TNR to pp feature mask");
        } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
            mStreamConfigInfo.postprocess_mask[stream_idx]
                    |= CAM_QCOM_FEATURE_LLVD;
            LOGH("Added LLVD SeeMore to pp feature mask");
        }
        // Staggered video HDR is added whenever the sensor advertises it.
        if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
                CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
            mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
        }
        break;
    }
    default:
        break;
    }
    LOGD("PP feature mask %llx",
            mStreamConfigInfo.postprocess_mask[stream_idx]);
}
1317
1318/*==============================================================================
1319 * FUNCTION : updateFpsInPreviewBuffer
1320 *
1321 * DESCRIPTION: update FPS information in preview buffer.
1322 *
1323 * PARAMETERS :
1324 * @metadata : pointer to metadata buffer
1325 * @frame_number: frame_number to look for in pending buffer list
1326 *
1327 * RETURN : None
1328 *
1329 *==========================================================================*/
1330void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1331 uint32_t frame_number)
1332{
1333 // Mark all pending buffers for this particular request
1334 // with corresponding framerate information
1335 for (List<PendingBuffersInRequest>::iterator req =
1336 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1337 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1338 for(List<PendingBufferInfo>::iterator j =
1339 req->mPendingBufferList.begin();
1340 j != req->mPendingBufferList.end(); j++) {
1341 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1342 if ((req->frame_number == frame_number) &&
1343 (channel->getStreamTypeMask() &
1344 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1345 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1346 CAM_INTF_PARM_FPS_RANGE, metadata) {
1347 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1348 struct private_handle_t *priv_handle =
1349 (struct private_handle_t *)(*(j->buffer));
1350 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1351 }
1352 }
1353 }
1354 }
1355}
1356
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001357/*==============================================================================
1358 * FUNCTION : updateTimeStampInPendingBuffers
1359 *
1360 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1361 * of a frame number
1362 *
1363 * PARAMETERS :
1364 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1365 * @timestamp : timestamp to be set
1366 *
1367 * RETURN : None
1368 *
1369 *==========================================================================*/
1370void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1371 uint32_t frameNumber, nsecs_t timestamp)
1372{
1373 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1374 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1375 if (req->frame_number != frameNumber)
1376 continue;
1377
1378 for (auto k = req->mPendingBufferList.begin();
1379 k != req->mPendingBufferList.end(); k++ ) {
1380 struct private_handle_t *priv_handle =
1381 (struct private_handle_t *) (*(k->buffer));
1382 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1383 }
1384 }
1385 return;
1386}
1387
Thierry Strudel3d639192016-09-09 11:52:26 -07001388/*===========================================================================
1389 * FUNCTION : configureStreams
1390 *
1391 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1392 * and output streams.
1393 *
1394 * PARAMETERS :
1395 * @stream_list : streams to be configured
1396 *
1397 * RETURN :
1398 *
1399 *==========================================================================*/
1400int QCamera3HardwareInterface::configureStreams(
1401 camera3_stream_configuration_t *streamList)
1402{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001403 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001404 int rc = 0;
1405
1406 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001407 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001408 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001409 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001410
1411 return rc;
1412}
1413
1414/*===========================================================================
1415 * FUNCTION : configureStreamsPerfLocked
1416 *
1417 * DESCRIPTION: configureStreams while perfLock is held.
1418 *
1419 * PARAMETERS :
1420 * @stream_list : streams to be configured
1421 *
1422 * RETURN : int32_t type of status
1423 * NO_ERROR -- success
1424 * none-zero failure code
1425 *==========================================================================*/
1426int QCamera3HardwareInterface::configureStreamsPerfLocked(
1427 camera3_stream_configuration_t *streamList)
1428{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001429 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001430 int rc = 0;
1431
1432 // Sanity check stream_list
1433 if (streamList == NULL) {
1434 LOGE("NULL stream configuration");
1435 return BAD_VALUE;
1436 }
1437 if (streamList->streams == NULL) {
1438 LOGE("NULL stream list");
1439 return BAD_VALUE;
1440 }
1441
1442 if (streamList->num_streams < 1) {
1443 LOGE("Bad number of streams requested: %d",
1444 streamList->num_streams);
1445 return BAD_VALUE;
1446 }
1447
1448 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1449 LOGE("Maximum number of streams %d exceeded: %d",
1450 MAX_NUM_STREAMS, streamList->num_streams);
1451 return BAD_VALUE;
1452 }
1453
1454 mOpMode = streamList->operation_mode;
1455 LOGD("mOpMode: %d", mOpMode);
1456
1457 /* first invalidate all the steams in the mStreamList
1458 * if they appear again, they will be validated */
1459 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1460 it != mStreamInfo.end(); it++) {
1461 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1462 if (channel) {
1463 channel->stop();
1464 }
1465 (*it)->status = INVALID;
1466 }
1467
1468 if (mRawDumpChannel) {
1469 mRawDumpChannel->stop();
1470 delete mRawDumpChannel;
1471 mRawDumpChannel = NULL;
1472 }
1473
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001474 if (mHdrPlusRawSrcChannel) {
1475 mHdrPlusRawSrcChannel->stop();
1476 delete mHdrPlusRawSrcChannel;
1477 mHdrPlusRawSrcChannel = NULL;
1478 }
1479
Thierry Strudel3d639192016-09-09 11:52:26 -07001480 if (mSupportChannel)
1481 mSupportChannel->stop();
1482
1483 if (mAnalysisChannel) {
1484 mAnalysisChannel->stop();
1485 }
1486 if (mMetadataChannel) {
1487 /* If content of mStreamInfo is not 0, there is metadata stream */
1488 mMetadataChannel->stop();
1489 }
1490 if (mChannelHandle) {
1491 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1492 mChannelHandle);
1493 LOGD("stopping channel %d", mChannelHandle);
1494 }
1495
1496 pthread_mutex_lock(&mMutex);
1497
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001498 // Check if HDR+ is enabled.
1499 char prop[PROPERTY_VALUE_MAX];
1500 property_get("persist.camera.hdrplus", prop, "0");
1501 bool enableHdrPlus = atoi(prop);
1502 if (enableHdrPlus) {
1503 ALOGD("%s: HDR+ in Camera HAL enabled.", __FUNCTION__);
1504 // Connect to HDR+ client if not yet.
1505 if (mHdrPlusClient == nullptr) {
1506 mHdrPlusClient = std::make_shared<HdrPlusClient>();
1507 rc = mHdrPlusClient->connect(this);
1508 if (rc < 0) {
1509 LOGE("%s: Failed to connect to HDR+ client: %s (%d).", __FUNCTION__,
1510 strerror(-rc), rc);
1511 pthread_mutex_unlock(&mMutex);
1512 return -ENODEV;
1513 }
1514
1515 // Set static metadata.
1516 rc = mHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
1517 if (rc < 0) {
1518 LOGE("%s: Failed set static metadata in HDR+ client: %s (%d).", __FUNCTION__,
1519 strerror(-rc), rc);
1520 pthread_mutex_unlock(&mMutex);
1521 return -ENODEV;
1522 }
1523 }
1524 } else {
1525 ALOGD("%s: HDR+ in Camera HAL disabled.", __FUNCTION__);
1526 // Disconnect from HDR+ client if HDR+ is not enabled.
1527 if (mHdrPlusClient != nullptr) {
1528 mHdrPlusClient->disconnect();
1529 mHdrPlusClient = nullptr;
1530 }
1531 }
1532
Thierry Strudel3d639192016-09-09 11:52:26 -07001533 // Check state
1534 switch (mState) {
1535 case INITIALIZED:
1536 case CONFIGURED:
1537 case STARTED:
1538 /* valid state */
1539 break;
1540 default:
1541 LOGE("Invalid state %d", mState);
1542 pthread_mutex_unlock(&mMutex);
1543 return -ENODEV;
1544 }
1545
1546 /* Check whether we have video stream */
1547 m_bIs4KVideo = false;
1548 m_bIsVideo = false;
1549 m_bEisSupportedSize = false;
1550 m_bTnrEnabled = false;
1551 bool isZsl = false;
1552 uint32_t videoWidth = 0U;
1553 uint32_t videoHeight = 0U;
1554 size_t rawStreamCnt = 0;
1555 size_t stallStreamCnt = 0;
1556 size_t processedStreamCnt = 0;
1557 // Number of streams on ISP encoder path
1558 size_t numStreamsOnEncoder = 0;
1559 size_t numYuv888OnEncoder = 0;
1560 bool bYuv888OverrideJpeg = false;
1561 cam_dimension_t largeYuv888Size = {0, 0};
1562 cam_dimension_t maxViewfinderSize = {0, 0};
1563 bool bJpegExceeds4K = false;
1564 bool bJpegOnEncoder = false;
1565 bool bUseCommonFeatureMask = false;
1566 cam_feature_mask_t commonFeatureMask = 0;
1567 bool bSmallJpegSize = false;
1568 uint32_t width_ratio;
1569 uint32_t height_ratio;
1570 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1571 camera3_stream_t *inputStream = NULL;
1572 bool isJpeg = false;
1573 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001574 cam_dimension_t previewSize = {0, 0};
Thierry Strudel3d639192016-09-09 11:52:26 -07001575
1576 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1577
1578 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001579 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001580 uint8_t eis_prop_set;
1581 uint32_t maxEisWidth = 0;
1582 uint32_t maxEisHeight = 0;
1583
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001584 // Initialize all instant AEC related variables
1585 mInstantAEC = false;
1586 mResetInstantAEC = false;
1587 mInstantAECSettledFrameNumber = 0;
1588 mAecSkipDisplayFrameBound = 0;
1589 mInstantAecFrameIdxCount = 0;
1590
Thierry Strudel3d639192016-09-09 11:52:26 -07001591 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1592
1593 size_t count = IS_TYPE_MAX;
1594 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1595 for (size_t i = 0; i < count; i++) {
1596 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001597 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1598 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001599 break;
1600 }
1601 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001602 count = CAM_OPT_STAB_MAX;
1603 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1604 for (size_t i = 0; i < count; i++) {
1605 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1606 oisSupported = true;
1607 break;
1608 }
1609 }
1610
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001611 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001612 maxEisWidth = MAX_EIS_WIDTH;
1613 maxEisHeight = MAX_EIS_HEIGHT;
1614 }
1615
1616 /* EIS setprop control */
1617 char eis_prop[PROPERTY_VALUE_MAX];
1618 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001619 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001620 eis_prop_set = (uint8_t)atoi(eis_prop);
1621
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001622 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001623 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1624
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001625 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1626 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1627
Thierry Strudel3d639192016-09-09 11:52:26 -07001628 /* stream configurations */
1629 for (size_t i = 0; i < streamList->num_streams; i++) {
1630 camera3_stream_t *newStream = streamList->streams[i];
1631 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1632 "height = %d, rotation = %d, usage = 0x%x",
1633 i, newStream->stream_type, newStream->format,
1634 newStream->width, newStream->height, newStream->rotation,
1635 newStream->usage);
1636 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1637 newStream->stream_type == CAMERA3_STREAM_INPUT){
1638 isZsl = true;
1639 }
1640 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1641 inputStream = newStream;
1642 }
1643
1644 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1645 isJpeg = true;
1646 jpegSize.width = newStream->width;
1647 jpegSize.height = newStream->height;
1648 if (newStream->width > VIDEO_4K_WIDTH ||
1649 newStream->height > VIDEO_4K_HEIGHT)
1650 bJpegExceeds4K = true;
1651 }
1652
1653 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1654 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1655 m_bIsVideo = true;
1656 videoWidth = newStream->width;
1657 videoHeight = newStream->height;
1658 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1659 (VIDEO_4K_HEIGHT <= newStream->height)) {
1660 m_bIs4KVideo = true;
1661 }
1662 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1663 (newStream->height <= maxEisHeight);
1664 }
1665 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1666 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1667 switch (newStream->format) {
1668 case HAL_PIXEL_FORMAT_BLOB:
1669 stallStreamCnt++;
1670 if (isOnEncoder(maxViewfinderSize, newStream->width,
1671 newStream->height)) {
1672 numStreamsOnEncoder++;
1673 bJpegOnEncoder = true;
1674 }
1675 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1676 newStream->width);
1677 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1678 newStream->height);;
1679 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1680 "FATAL: max_downscale_factor cannot be zero and so assert");
1681 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1682 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1683 LOGH("Setting small jpeg size flag to true");
1684 bSmallJpegSize = true;
1685 }
1686 break;
1687 case HAL_PIXEL_FORMAT_RAW10:
1688 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1689 case HAL_PIXEL_FORMAT_RAW16:
1690 rawStreamCnt++;
1691 break;
1692 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1693 processedStreamCnt++;
1694 if (isOnEncoder(maxViewfinderSize, newStream->width,
1695 newStream->height)) {
1696 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1697 !IS_USAGE_ZSL(newStream->usage)) {
1698 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1699 }
1700 numStreamsOnEncoder++;
1701 }
1702 break;
1703 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1704 processedStreamCnt++;
1705 if (isOnEncoder(maxViewfinderSize, newStream->width,
1706 newStream->height)) {
1707 // If Yuv888 size is not greater than 4K, set feature mask
1708 // to SUPERSET so that it support concurrent request on
1709 // YUV and JPEG.
1710 if (newStream->width <= VIDEO_4K_WIDTH &&
1711 newStream->height <= VIDEO_4K_HEIGHT) {
1712 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1713 }
1714 numStreamsOnEncoder++;
1715 numYuv888OnEncoder++;
1716 largeYuv888Size.width = newStream->width;
1717 largeYuv888Size.height = newStream->height;
1718 }
1719 break;
1720 default:
1721 processedStreamCnt++;
1722 if (isOnEncoder(maxViewfinderSize, newStream->width,
1723 newStream->height)) {
1724 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1725 numStreamsOnEncoder++;
1726 }
1727 break;
1728 }
1729
1730 }
1731 }
1732
1733 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1734 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1735 !m_bIsVideo) {
1736 m_bEisEnable = false;
1737 }
1738
1739 /* Logic to enable/disable TNR based on specific config size/etc.*/
1740 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1741 ((videoWidth == 1920 && videoHeight == 1080) ||
1742 (videoWidth == 1280 && videoHeight == 720)) &&
1743 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1744 m_bTnrEnabled = true;
1745
1746 /* Check if num_streams is sane */
1747 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1748 rawStreamCnt > MAX_RAW_STREAMS ||
1749 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1750 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1751 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1752 pthread_mutex_unlock(&mMutex);
1753 return -EINVAL;
1754 }
1755 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001756 if (isZsl && m_bIs4KVideo) {
1757 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 pthread_mutex_unlock(&mMutex);
1759 return -EINVAL;
1760 }
1761 /* Check if stream sizes are sane */
1762 if (numStreamsOnEncoder > 2) {
1763 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1764 pthread_mutex_unlock(&mMutex);
1765 return -EINVAL;
1766 } else if (1 < numStreamsOnEncoder){
1767 bUseCommonFeatureMask = true;
1768 LOGH("Multiple streams above max viewfinder size, common mask needed");
1769 }
1770
1771 /* Check if BLOB size is greater than 4k in 4k recording case */
1772 if (m_bIs4KVideo && bJpegExceeds4K) {
1773 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1774 pthread_mutex_unlock(&mMutex);
1775 return -EINVAL;
1776 }
1777
1778 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1779 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1780 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1781 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1782 // configurations:
1783 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1784 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1785 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1786 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1787 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1788 __func__);
1789 pthread_mutex_unlock(&mMutex);
1790 return -EINVAL;
1791 }
1792
1793 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1794 // the YUV stream's size is greater or equal to the JPEG size, set common
1795 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1796 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1797 jpegSize.width, jpegSize.height) &&
1798 largeYuv888Size.width > jpegSize.width &&
1799 largeYuv888Size.height > jpegSize.height) {
1800 bYuv888OverrideJpeg = true;
1801 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1802 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1803 }
1804
1805 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1806 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1807 commonFeatureMask);
1808 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1809 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1810
1811 rc = validateStreamDimensions(streamList);
1812 if (rc == NO_ERROR) {
1813 rc = validateStreamRotations(streamList);
1814 }
1815 if (rc != NO_ERROR) {
1816 LOGE("Invalid stream configuration requested!");
1817 pthread_mutex_unlock(&mMutex);
1818 return rc;
1819 }
1820
1821 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1822 for (size_t i = 0; i < streamList->num_streams; i++) {
1823 camera3_stream_t *newStream = streamList->streams[i];
1824 LOGH("newStream type = %d, stream format = %d "
1825 "stream size : %d x %d, stream rotation = %d",
1826 newStream->stream_type, newStream->format,
1827 newStream->width, newStream->height, newStream->rotation);
1828 //if the stream is in the mStreamList validate it
1829 bool stream_exists = false;
1830 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1831 it != mStreamInfo.end(); it++) {
1832 if ((*it)->stream == newStream) {
1833 QCamera3ProcessingChannel *channel =
1834 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1835 stream_exists = true;
1836 if (channel)
1837 delete channel;
1838 (*it)->status = VALID;
1839 (*it)->stream->priv = NULL;
1840 (*it)->channel = NULL;
1841 }
1842 }
1843 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1844 //new stream
1845 stream_info_t* stream_info;
1846 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1847 if (!stream_info) {
1848 LOGE("Could not allocate stream info");
1849 rc = -ENOMEM;
1850 pthread_mutex_unlock(&mMutex);
1851 return rc;
1852 }
1853 stream_info->stream = newStream;
1854 stream_info->status = VALID;
1855 stream_info->channel = NULL;
1856 mStreamInfo.push_back(stream_info);
1857 }
1858 /* Covers Opaque ZSL and API1 F/W ZSL */
1859 if (IS_USAGE_ZSL(newStream->usage)
1860 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1861 if (zslStream != NULL) {
1862 LOGE("Multiple input/reprocess streams requested!");
1863 pthread_mutex_unlock(&mMutex);
1864 return BAD_VALUE;
1865 }
1866 zslStream = newStream;
1867 }
1868 /* Covers YUV reprocess */
1869 if (inputStream != NULL) {
1870 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1871 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1872 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1873 && inputStream->width == newStream->width
1874 && inputStream->height == newStream->height) {
1875 if (zslStream != NULL) {
1876 /* This scenario indicates multiple YUV streams with same size
1877 * as input stream have been requested, since zsl stream handle
1878 * is solely use for the purpose of overriding the size of streams
1879 * which share h/w streams we will just make a guess here as to
1880 * which of the stream is a ZSL stream, this will be refactored
1881 * once we make generic logic for streams sharing encoder output
1882 */
1883 LOGH("Warning, Multiple ip/reprocess streams requested!");
1884 }
1885 zslStream = newStream;
1886 }
1887 }
1888 }
1889
1890 /* If a zsl stream is set, we know that we have configured at least one input or
1891 bidirectional stream */
1892 if (NULL != zslStream) {
1893 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1894 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1895 mInputStreamInfo.format = zslStream->format;
1896 mInputStreamInfo.usage = zslStream->usage;
1897 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1898 mInputStreamInfo.dim.width,
1899 mInputStreamInfo.dim.height,
1900 mInputStreamInfo.format, mInputStreamInfo.usage);
1901 }
1902
1903 cleanAndSortStreamInfo();
1904 if (mMetadataChannel) {
1905 delete mMetadataChannel;
1906 mMetadataChannel = NULL;
1907 }
1908 if (mSupportChannel) {
1909 delete mSupportChannel;
1910 mSupportChannel = NULL;
1911 }
1912
1913 if (mAnalysisChannel) {
1914 delete mAnalysisChannel;
1915 mAnalysisChannel = NULL;
1916 }
1917
1918 if (mDummyBatchChannel) {
1919 delete mDummyBatchChannel;
1920 mDummyBatchChannel = NULL;
1921 }
1922
1923 //Create metadata channel and initialize it
1924 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1925 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1926 gCamCapability[mCameraId]->color_arrangement);
1927 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1928 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001929 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07001930 if (mMetadataChannel == NULL) {
1931 LOGE("failed to allocate metadata channel");
1932 rc = -ENOMEM;
1933 pthread_mutex_unlock(&mMutex);
1934 return rc;
1935 }
1936 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1937 if (rc < 0) {
1938 LOGE("metadata channel initialization failed");
1939 delete mMetadataChannel;
1940 mMetadataChannel = NULL;
1941 pthread_mutex_unlock(&mMutex);
1942 return rc;
1943 }
1944
Thierry Strudel3d639192016-09-09 11:52:26 -07001945 bool isRawStreamRequested = false;
1946 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1947 /* Allocate channel objects for the requested streams */
1948 for (size_t i = 0; i < streamList->num_streams; i++) {
1949 camera3_stream_t *newStream = streamList->streams[i];
1950 uint32_t stream_usage = newStream->usage;
1951 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1952 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1953 struct camera_info *p_info = NULL;
1954 pthread_mutex_lock(&gCamLock);
1955 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1956 pthread_mutex_unlock(&gCamLock);
1957 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1958 || IS_USAGE_ZSL(newStream->usage)) &&
1959 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1960 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1961 if (bUseCommonFeatureMask) {
1962 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1963 commonFeatureMask;
1964 } else {
1965 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1966 CAM_QCOM_FEATURE_NONE;
1967 }
1968
1969 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1970 LOGH("Input stream configured, reprocess config");
1971 } else {
1972 //for non zsl streams find out the format
1973 switch (newStream->format) {
1974 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1975 {
1976 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1977 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1978 /* add additional features to pp feature mask */
1979 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1980 mStreamConfigInfo.num_streams);
1981
1982 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1983 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1984 CAM_STREAM_TYPE_VIDEO;
1985 if (m_bTnrEnabled && m_bTnrVideo) {
1986 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1987 CAM_QCOM_FEATURE_CPP_TNR;
1988 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1990 ~CAM_QCOM_FEATURE_CDS;
1991 }
1992 } else {
1993 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1994 CAM_STREAM_TYPE_PREVIEW;
1995 if (m_bTnrEnabled && m_bTnrPreview) {
1996 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1997 CAM_QCOM_FEATURE_CPP_TNR;
1998 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1999 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2000 ~CAM_QCOM_FEATURE_CDS;
2001 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002002 if(!m_bSwTnrPreview) {
2003 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2004 ~CAM_QTI_FEATURE_SW_TNR;
2005 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002006 padding_info.width_padding = mSurfaceStridePadding;
2007 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002008 previewSize.width = (int32_t)newStream->width;
2009 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002010 }
2011 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2012 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2013 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2014 newStream->height;
2015 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2016 newStream->width;
2017 }
2018 }
2019 break;
2020 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2021 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2022 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2023 if (bUseCommonFeatureMask)
2024 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2025 commonFeatureMask;
2026 else
2027 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2028 CAM_QCOM_FEATURE_NONE;
2029 } else {
2030 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2031 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2032 }
2033 break;
2034 case HAL_PIXEL_FORMAT_BLOB:
2035 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2036 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2037 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2038 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2039 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2040 } else {
2041 if (bUseCommonFeatureMask &&
2042 isOnEncoder(maxViewfinderSize, newStream->width,
2043 newStream->height)) {
2044 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2045 } else {
2046 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2047 }
2048 }
2049 if (isZsl) {
2050 if (zslStream) {
2051 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2052 (int32_t)zslStream->width;
2053 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2054 (int32_t)zslStream->height;
2055 } else {
2056 LOGE("Error, No ZSL stream identified");
2057 pthread_mutex_unlock(&mMutex);
2058 return -EINVAL;
2059 }
2060 } else if (m_bIs4KVideo) {
2061 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2062 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2063 } else if (bYuv888OverrideJpeg) {
2064 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2065 (int32_t)largeYuv888Size.width;
2066 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2067 (int32_t)largeYuv888Size.height;
2068 }
2069 break;
2070 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2071 case HAL_PIXEL_FORMAT_RAW16:
2072 case HAL_PIXEL_FORMAT_RAW10:
2073 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2074 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2075 isRawStreamRequested = true;
2076 break;
2077 default:
2078 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2079 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2080 break;
2081 }
2082 }
2083
2084 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2085 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2086 gCamCapability[mCameraId]->color_arrangement);
2087
2088 if (newStream->priv == NULL) {
2089 //New stream, construct channel
2090 switch (newStream->stream_type) {
2091 case CAMERA3_STREAM_INPUT:
2092 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2093 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2094 break;
2095 case CAMERA3_STREAM_BIDIRECTIONAL:
2096 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2097 GRALLOC_USAGE_HW_CAMERA_WRITE;
2098 break;
2099 case CAMERA3_STREAM_OUTPUT:
2100 /* For video encoding stream, set read/write rarely
2101 * flag so that they may be set to un-cached */
2102 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2103 newStream->usage |=
2104 (GRALLOC_USAGE_SW_READ_RARELY |
2105 GRALLOC_USAGE_SW_WRITE_RARELY |
2106 GRALLOC_USAGE_HW_CAMERA_WRITE);
2107 else if (IS_USAGE_ZSL(newStream->usage))
2108 {
2109 LOGD("ZSL usage flag skipping");
2110 }
2111 else if (newStream == zslStream
2112 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2113 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2114 } else
2115 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2116 break;
2117 default:
2118 LOGE("Invalid stream_type %d", newStream->stream_type);
2119 break;
2120 }
2121
2122 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2123 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2124 QCamera3ProcessingChannel *channel = NULL;
2125 switch (newStream->format) {
2126 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2127 if ((newStream->usage &
2128 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2129 (streamList->operation_mode ==
2130 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2131 ) {
2132 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2133 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002134 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002135 this,
2136 newStream,
2137 (cam_stream_type_t)
2138 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2139 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2140 mMetadataChannel,
2141 0); //heap buffers are not required for HFR video channel
2142 if (channel == NULL) {
2143 LOGE("allocation of channel failed");
2144 pthread_mutex_unlock(&mMutex);
2145 return -ENOMEM;
2146 }
2147 //channel->getNumBuffers() will return 0 here so use
2148 //MAX_INFLIGH_HFR_REQUESTS
2149 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2150 newStream->priv = channel;
2151 LOGI("num video buffers in HFR mode: %d",
2152 MAX_INFLIGHT_HFR_REQUESTS);
2153 } else {
2154 /* Copy stream contents in HFR preview only case to create
2155 * dummy batch channel so that sensor streaming is in
2156 * HFR mode */
2157 if (!m_bIsVideo && (streamList->operation_mode ==
2158 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2159 mDummyBatchStream = *newStream;
2160 }
2161 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2162 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002163 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002164 this,
2165 newStream,
2166 (cam_stream_type_t)
2167 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2168 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2169 mMetadataChannel,
2170 MAX_INFLIGHT_REQUESTS);
2171 if (channel == NULL) {
2172 LOGE("allocation of channel failed");
2173 pthread_mutex_unlock(&mMutex);
2174 return -ENOMEM;
2175 }
2176 newStream->max_buffers = channel->getNumBuffers();
2177 newStream->priv = channel;
2178 }
2179 break;
2180 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2181 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2182 mChannelHandle,
2183 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002184 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002185 this,
2186 newStream,
2187 (cam_stream_type_t)
2188 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2189 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2190 mMetadataChannel);
2191 if (channel == NULL) {
2192 LOGE("allocation of YUV channel failed");
2193 pthread_mutex_unlock(&mMutex);
2194 return -ENOMEM;
2195 }
2196 newStream->max_buffers = channel->getNumBuffers();
2197 newStream->priv = channel;
2198 break;
2199 }
2200 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2201 case HAL_PIXEL_FORMAT_RAW16:
2202 case HAL_PIXEL_FORMAT_RAW10:
2203 mRawChannel = new QCamera3RawChannel(
2204 mCameraHandle->camera_handle, mChannelHandle,
2205 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002206 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002207 this, newStream,
2208 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2209 mMetadataChannel,
2210 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2211 if (mRawChannel == NULL) {
2212 LOGE("allocation of raw channel failed");
2213 pthread_mutex_unlock(&mMutex);
2214 return -ENOMEM;
2215 }
2216 newStream->max_buffers = mRawChannel->getNumBuffers();
2217 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2218 break;
2219 case HAL_PIXEL_FORMAT_BLOB:
2220 // Max live snapshot inflight buffer is 1. This is to mitigate
2221 // frame drop issues for video snapshot. The more buffers being
2222 // allocated, the more frame drops there are.
2223 mPictureChannel = new QCamera3PicChannel(
2224 mCameraHandle->camera_handle, mChannelHandle,
2225 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002226 setBufferErrorStatus, &padding_info, this, newStream,
Thierry Strudel3d639192016-09-09 11:52:26 -07002227 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2228 m_bIs4KVideo, isZsl, mMetadataChannel,
2229 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2230 if (mPictureChannel == NULL) {
2231 LOGE("allocation of channel failed");
2232 pthread_mutex_unlock(&mMutex);
2233 return -ENOMEM;
2234 }
2235 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2236 newStream->max_buffers = mPictureChannel->getNumBuffers();
2237 mPictureChannel->overrideYuvSize(
2238 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2239 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2240 break;
2241
2242 default:
2243 LOGE("not a supported format 0x%x", newStream->format);
2244 break;
2245 }
2246 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2247 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2248 } else {
2249 LOGE("Error, Unknown stream type");
2250 pthread_mutex_unlock(&mMutex);
2251 return -EINVAL;
2252 }
2253
2254 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2255 if (channel != NULL && channel->isUBWCEnabled()) {
2256 cam_format_t fmt = channel->getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002257 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2258 newStream->width, newStream->height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002259 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2260 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2261 }
2262 }
2263
2264 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2265 it != mStreamInfo.end(); it++) {
2266 if ((*it)->stream == newStream) {
2267 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2268 break;
2269 }
2270 }
2271 } else {
2272 // Channel already exists for this stream
2273 // Do nothing for now
2274 }
2275 padding_info = gCamCapability[mCameraId]->padding_info;
2276
2277 /* Do not add entries for input stream in metastream info
2278 * since there is no real stream associated with it
2279 */
2280 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2281 mStreamConfigInfo.num_streams++;
2282 }
2283
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002284 // Create analysis stream all the time, even when h/w support is not available
2285 {
2286 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2287 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2288 gCamCapability[mCameraId]->color_arrangement);
2289 cam_analysis_info_t analysisInfo;
2290 int32_t ret = NO_ERROR;
2291 ret = mCommon.getAnalysisInfo(
2292 FALSE,
2293 analysisFeatureMask,
2294 &analysisInfo);
2295 if (ret == NO_ERROR) {
2296 cam_dimension_t analysisDim;
2297 analysisDim = mCommon.getMatchingDimension(previewSize,
2298 analysisInfo.analysis_recommended_res);
2299
2300 mAnalysisChannel = new QCamera3SupportChannel(
2301 mCameraHandle->camera_handle,
2302 mChannelHandle,
2303 mCameraHandle->ops,
2304 &analysisInfo.analysis_padding_info,
2305 analysisFeatureMask,
2306 CAM_STREAM_TYPE_ANALYSIS,
2307 &analysisDim,
2308 (analysisInfo.analysis_format
2309 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2310 : CAM_FORMAT_YUV_420_NV21),
2311 analysisInfo.hw_analysis_supported,
2312 gCamCapability[mCameraId]->color_arrangement,
2313 this,
2314 0); // force buffer count to 0
2315 } else {
2316 LOGW("getAnalysisInfo failed, ret = %d", ret);
2317 }
2318 if (!mAnalysisChannel) {
2319 LOGW("Analysis channel cannot be created");
2320 }
2321 }
2322
Thierry Strudel3d639192016-09-09 11:52:26 -07002323 //RAW DUMP channel
2324 if (mEnableRawDump && isRawStreamRequested == false){
2325 cam_dimension_t rawDumpSize;
2326 rawDumpSize = getMaxRawSize(mCameraId);
2327 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2328 setPAAFSupport(rawDumpFeatureMask,
2329 CAM_STREAM_TYPE_RAW,
2330 gCamCapability[mCameraId]->color_arrangement);
2331 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2332 mChannelHandle,
2333 mCameraHandle->ops,
2334 rawDumpSize,
2335 &padding_info,
2336 this, rawDumpFeatureMask);
2337 if (!mRawDumpChannel) {
2338 LOGE("Raw Dump channel cannot be created");
2339 pthread_mutex_unlock(&mMutex);
2340 return -ENOMEM;
2341 }
2342 }
2343
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002344 // Initialize HDR+ Raw Source channel.
2345 if (mHdrPlusClient != nullptr) {
2346 if (isRawStreamRequested || mRawDumpChannel) {
2347 ALOGE("%s: Enabling HDR+ while RAW output stream is configured is not supported.",
2348 __FUNCTION__);
2349 mHdrPlusClient->disconnect();
2350 mHdrPlusClient = nullptr;
2351 } else {
2352 cam_dimension_t rawSize = getMaxRawSize(mCameraId);
2353 cam_feature_mask_t hdrPlusRawFeatureMask = CAM_QCOM_FEATURE_NONE;
2354 setPAAFSupport(hdrPlusRawFeatureMask,
2355 CAM_STREAM_TYPE_RAW,
2356 gCamCapability[mCameraId]->color_arrangement);
2357 mHdrPlusRawSrcChannel = new QCamera3HdrPlusRawSrcChannel(mCameraHandle->camera_handle,
2358 mChannelHandle,
2359 mCameraHandle->ops,
2360 rawSize,
2361 &padding_info,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08002362 this, hdrPlusRawFeatureMask,
2363 mHdrPlusClient,
2364 kPbRaw10InputStreamId);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002365 if (!mHdrPlusRawSrcChannel) {
2366 LOGE("HDR+ Raw Source channel cannot be created");
2367 pthread_mutex_unlock(&mMutex);
2368 return -ENOMEM;
2369 }
2370 }
2371 }
2372
Thierry Strudel3d639192016-09-09 11:52:26 -07002373
2374 if (mAnalysisChannel) {
2375 cam_analysis_info_t analysisInfo;
2376 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2377 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2378 CAM_STREAM_TYPE_ANALYSIS;
2379 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2380 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2381 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2382 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2383 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002384 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002385 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2386 &analysisInfo);
2387 if (rc != NO_ERROR) {
2388 LOGE("getAnalysisInfo failed, ret = %d", rc);
2389 pthread_mutex_unlock(&mMutex);
2390 return rc;
2391 }
2392 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002393 mCommon.getMatchingDimension(previewSize,
2394 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002395 mStreamConfigInfo.num_streams++;
2396 }
2397
2398 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2399 cam_analysis_info_t supportInfo;
2400 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2401 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2402 setPAAFSupport(callbackFeatureMask,
2403 CAM_STREAM_TYPE_CALLBACK,
2404 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002405 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002406 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002407 if (ret != NO_ERROR) {
2408 /* Ignore the error for Mono camera
2409 * because the PAAF bit mask is only set
2410 * for CAM_STREAM_TYPE_ANALYSIS stream type
2411 */
2412 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2413 LOGW("getAnalysisInfo failed, ret = %d", ret);
2414 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 }
2416 mSupportChannel = new QCamera3SupportChannel(
2417 mCameraHandle->camera_handle,
2418 mChannelHandle,
2419 mCameraHandle->ops,
2420 &gCamCapability[mCameraId]->padding_info,
2421 callbackFeatureMask,
2422 CAM_STREAM_TYPE_CALLBACK,
2423 &QCamera3SupportChannel::kDim,
2424 CAM_FORMAT_YUV_420_NV21,
2425 supportInfo.hw_analysis_supported,
2426 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002427 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 if (!mSupportChannel) {
2429 LOGE("dummy channel cannot be created");
2430 pthread_mutex_unlock(&mMutex);
2431 return -ENOMEM;
2432 }
2433 }
2434
2435 if (mSupportChannel) {
2436 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2437 QCamera3SupportChannel::kDim;
2438 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2439 CAM_STREAM_TYPE_CALLBACK;
2440 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2441 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2442 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2443 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2444 gCamCapability[mCameraId]->color_arrangement);
2445 mStreamConfigInfo.num_streams++;
2446 }
2447
2448 if (mRawDumpChannel) {
2449 cam_dimension_t rawSize;
2450 rawSize = getMaxRawSize(mCameraId);
2451 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2452 rawSize;
2453 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2454 CAM_STREAM_TYPE_RAW;
2455 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2456 CAM_QCOM_FEATURE_NONE;
2457 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2458 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2459 gCamCapability[mCameraId]->color_arrangement);
2460 mStreamConfigInfo.num_streams++;
2461 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002462
2463 if (mHdrPlusRawSrcChannel) {
2464 cam_dimension_t rawSize;
2465 rawSize = getMaxRawSize(mCameraId);
2466 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2467 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2469 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2470 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2471 gCamCapability[mCameraId]->color_arrangement);
2472 mStreamConfigInfo.num_streams++;
2473 }
2474
Thierry Strudel3d639192016-09-09 11:52:26 -07002475 /* In HFR mode, if video stream is not added, create a dummy channel so that
2476 * ISP can create a batch mode even for preview only case. This channel is
2477 * never 'start'ed (no stream-on), it is only 'initialized' */
2478 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2479 !m_bIsVideo) {
2480 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2481 setPAAFSupport(dummyFeatureMask,
2482 CAM_STREAM_TYPE_VIDEO,
2483 gCamCapability[mCameraId]->color_arrangement);
2484 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2485 mChannelHandle,
2486 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002487 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002488 this,
2489 &mDummyBatchStream,
2490 CAM_STREAM_TYPE_VIDEO,
2491 dummyFeatureMask,
2492 mMetadataChannel);
2493 if (NULL == mDummyBatchChannel) {
2494 LOGE("creation of mDummyBatchChannel failed."
2495 "Preview will use non-hfr sensor mode ");
2496 }
2497 }
2498 if (mDummyBatchChannel) {
2499 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2500 mDummyBatchStream.width;
2501 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2502 mDummyBatchStream.height;
2503 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2504 CAM_STREAM_TYPE_VIDEO;
2505 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2506 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2507 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2508 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2509 gCamCapability[mCameraId]->color_arrangement);
2510 mStreamConfigInfo.num_streams++;
2511 }
2512
2513 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2514 mStreamConfigInfo.buffer_info.max_buffers =
2515 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2516
2517 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2518 for (pendingRequestIterator i = mPendingRequestsList.begin();
2519 i != mPendingRequestsList.end();) {
2520 i = erasePendingRequest(i);
2521 }
2522 mPendingFrameDropList.clear();
2523 // Initialize/Reset the pending buffers list
2524 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2525 req.mPendingBufferList.clear();
2526 }
2527 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2528
Thierry Strudel3d639192016-09-09 11:52:26 -07002529 mCurJpegMeta.clear();
2530 //Get min frame duration for this streams configuration
2531 deriveMinFrameDuration();
2532
2533 // Update state
2534 mState = CONFIGURED;
2535
2536 pthread_mutex_unlock(&mMutex);
2537
2538 return rc;
2539}
2540
2541/*===========================================================================
2542 * FUNCTION : validateCaptureRequest
2543 *
2544 * DESCRIPTION: validate a capture request from camera service
2545 *
2546 * PARAMETERS :
2547 * @request : request from framework to process
2548 *
2549 * RETURN :
2550 *
2551 *==========================================================================*/
2552int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002553 camera3_capture_request_t *request,
2554 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07002555{
2556 ssize_t idx = 0;
2557 const camera3_stream_buffer_t *b;
2558 CameraMetadata meta;
2559
2560 /* Sanity check the request */
2561 if (request == NULL) {
2562 LOGE("NULL capture request");
2563 return BAD_VALUE;
2564 }
2565
2566 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2567 /*settings cannot be null for the first request*/
2568 return BAD_VALUE;
2569 }
2570
2571 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002572 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
2573 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 LOGE("Request %d: No output buffers provided!",
2575 __FUNCTION__, frameNumber);
2576 return BAD_VALUE;
2577 }
2578 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2579 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2580 request->num_output_buffers, MAX_NUM_STREAMS);
2581 return BAD_VALUE;
2582 }
2583 if (request->input_buffer != NULL) {
2584 b = request->input_buffer;
2585 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2586 LOGE("Request %d: Buffer %ld: Status not OK!",
2587 frameNumber, (long)idx);
2588 return BAD_VALUE;
2589 }
2590 if (b->release_fence != -1) {
2591 LOGE("Request %d: Buffer %ld: Has a release fence!",
2592 frameNumber, (long)idx);
2593 return BAD_VALUE;
2594 }
2595 if (b->buffer == NULL) {
2596 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2597 frameNumber, (long)idx);
2598 return BAD_VALUE;
2599 }
2600 }
2601
2602 // Validate all buffers
2603 b = request->output_buffers;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002604 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002605 QCamera3ProcessingChannel *channel =
2606 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2607 if (channel == NULL) {
2608 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2609 frameNumber, (long)idx);
2610 return BAD_VALUE;
2611 }
2612 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2613 LOGE("Request %d: Buffer %ld: Status not OK!",
2614 frameNumber, (long)idx);
2615 return BAD_VALUE;
2616 }
2617 if (b->release_fence != -1) {
2618 LOGE("Request %d: Buffer %ld: Has a release fence!",
2619 frameNumber, (long)idx);
2620 return BAD_VALUE;
2621 }
2622 if (b->buffer == NULL) {
2623 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2624 frameNumber, (long)idx);
2625 return BAD_VALUE;
2626 }
2627 if (*(b->buffer) == NULL) {
2628 LOGE("Request %d: Buffer %ld: NULL private handle!",
2629 frameNumber, (long)idx);
2630 return BAD_VALUE;
2631 }
2632 idx++;
2633 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002634 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002635 return NO_ERROR;
2636}
2637
2638/*===========================================================================
2639 * FUNCTION : deriveMinFrameDuration
2640 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2642 * on currently configured streams.
2643 *
2644 * PARAMETERS : NONE
2645 *
2646 * RETURN : NONE
2647 *
2648 *==========================================================================*/
2649void QCamera3HardwareInterface::deriveMinFrameDuration()
2650{
2651 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2652
2653 maxJpegDim = 0;
2654 maxProcessedDim = 0;
2655 maxRawDim = 0;
2656
2657 // Figure out maximum jpeg, processed, and raw dimensions
2658 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2659 it != mStreamInfo.end(); it++) {
2660
2661 // Input stream doesn't have valid stream_type
2662 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2663 continue;
2664
2665 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2666 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2667 if (dimension > maxJpegDim)
2668 maxJpegDim = dimension;
2669 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2670 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2671 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2672 if (dimension > maxRawDim)
2673 maxRawDim = dimension;
2674 } else {
2675 if (dimension > maxProcessedDim)
2676 maxProcessedDim = dimension;
2677 }
2678 }
2679
2680 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2681 MAX_SIZES_CNT);
2682
2683 //Assume all jpeg dimensions are in processed dimensions.
2684 if (maxJpegDim > maxProcessedDim)
2685 maxProcessedDim = maxJpegDim;
2686 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2687 if (maxProcessedDim > maxRawDim) {
2688 maxRawDim = INT32_MAX;
2689
2690 for (size_t i = 0; i < count; i++) {
2691 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2692 gCamCapability[mCameraId]->raw_dim[i].height;
2693 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2694 maxRawDim = dimension;
2695 }
2696 }
2697
2698 //Find minimum durations for processed, jpeg, and raw
2699 for (size_t i = 0; i < count; i++) {
2700 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2701 gCamCapability[mCameraId]->raw_dim[i].height) {
2702 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2703 break;
2704 }
2705 }
2706 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2707 for (size_t i = 0; i < count; i++) {
2708 if (maxProcessedDim ==
2709 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2710 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2711 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2712 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2713 break;
2714 }
2715 }
2716}
2717
2718/*===========================================================================
2719 * FUNCTION : getMinFrameDuration
2720 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
2722 * and current request configuration.
2723 *
 * PARAMETERS : @request: request sent by the frameworks
2725 *
 * RETURN     : min frame duration for a particular request
2727 *
2728 *==========================================================================*/
2729int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2730{
2731 bool hasJpegStream = false;
2732 bool hasRawStream = false;
2733 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2734 const camera3_stream_t *stream = request->output_buffers[i].stream;
2735 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2736 hasJpegStream = true;
2737 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2738 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2739 stream->format == HAL_PIXEL_FORMAT_RAW16)
2740 hasRawStream = true;
2741 }
2742
2743 if (!hasJpegStream)
2744 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2745 else
2746 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2747}
2748
2749/*===========================================================================
2750 * FUNCTION : handleBuffersDuringFlushLock
2751 *
2752 * DESCRIPTION: Account for buffers returned from back-end during flush
2753 * This function is executed while mMutex is held by the caller.
2754 *
2755 * PARAMETERS :
2756 * @buffer: image buffer for the callback
2757 *
2758 * RETURN :
2759 *==========================================================================*/
2760void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2761{
2762 bool buffer_found = false;
2763 for (List<PendingBuffersInRequest>::iterator req =
2764 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2765 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2766 for (List<PendingBufferInfo>::iterator i =
2767 req->mPendingBufferList.begin();
2768 i != req->mPendingBufferList.end(); i++) {
2769 if (i->buffer == buffer->buffer) {
2770 mPendingBuffersMap.numPendingBufsAtFlush--;
2771 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2772 buffer->buffer, req->frame_number,
2773 mPendingBuffersMap.numPendingBufsAtFlush);
2774 buffer_found = true;
2775 break;
2776 }
2777 }
2778 if (buffer_found) {
2779 break;
2780 }
2781 }
2782 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2783 //signal the flush()
2784 LOGD("All buffers returned to HAL. Continue flush");
2785 pthread_cond_signal(&mBuffersCond);
2786 }
2787}
2788
Thierry Strudel3d639192016-09-09 11:52:26 -07002789/*===========================================================================
2790 * FUNCTION : handleBatchMetadata
2791 *
2792 * DESCRIPTION: Handles metadata buffer callback in batch mode
2793 *
2794 * PARAMETERS : @metadata_buf: metadata buffer
2795 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2796 * the meta buf in this method
2797 *
2798 * RETURN :
2799 *
2800 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metadata will contain the frame number and timestamp
     * of the last frame in the batch. Eg: a batch containing buffers from
     * request 5,6,7 and 8 will have frame number and timestamp corresponding
     * to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata
     * => multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    // Pointers into the metadata buffer for the batch-level frame-number and
    // timestamp entries; any of these may be NULL if the entry is absent.
    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    // If any required entry is missing, remember that but do NOT return:
    // handleMetadataWithLock is still invoked below for pipeline-depth
    // accounting.
    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    // mMutex protects mPendingBatchMap (first-frame-number lookup) and mState.
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to the first frame
        // number of the batch; a miss means the back-end sent a frame number
        // we never issued, which is unrecoverable.
        ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
        if(idx < 0) {
            LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
                last_urgent_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
        if(idx < 0) {
            LOGE("Invalid frame number received: %d. Irrecoverable error",
                last_frame_number);
            mState = ERROR;
            pthread_mutex_unlock(&mMutex);
            return;
        }
        first_frame_number = mPendingBatchMap.valueAt(idx);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // The batch is fully accounted for once its (non-urgent) frame number
        // arrives, so drop it from the pending map here.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    // Release the lock before the per-frame loop; handleMetadataWithLock
    // re-acquires it for each iteration.
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        // NOTE(review): urgentFrameNumDiff/frameNumDiff are size_t but logged
        // with %d — benign on 32-bit builds, verify for 64-bit size_t.
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    // Expand the single batch metadata into one result per frame in the batch
    // by patching the frame-number and timestamp entries in place before each
    // handleMetadataWithLock call.
    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                             CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: back-compute the first frame's capture time
                //from the last frame's timestamp and the HFR frame interval,
                //then step forward by i frame intervals.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        // Pass false here: the single underlying buffer is released once,
        // after the whole batch has been processed (below).
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2965
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002966void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2967 camera3_error_msg_code_t errorCode)
2968{
2969 camera3_notify_msg_t notify_msg;
2970 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2971 notify_msg.type = CAMERA3_MSG_ERROR;
2972 notify_msg.message.error.error_code = errorCode;
2973 notify_msg.message.error.error_stream = NULL;
2974 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002975 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002976
2977 return;
2978}
Thierry Strudel3d639192016-09-09 11:52:26 -07002979/*===========================================================================
2980 * FUNCTION : handleMetadataWithLock
2981 *
2982 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
2983 *
2984 * PARAMETERS : @metadata_buf: metadata buffer
2985 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
2986 * the meta buf in this method
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002987 * @firstMetadataInBatch: Boolean to indicate whether this is the
2988 * first metadata in a batch. Valid only for batch mode
Thierry Strudel3d639192016-09-09 11:52:26 -07002989 *
2990 * RETURN :
2991 *
2992 *==========================================================================*/
2993void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002994 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
2995 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07002996{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002997 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07002998 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
2999 //during flush do not send metadata from this thread
3000 LOGD("not sending metadata during flush or when mState is error");
3001 if (free_and_bufdone_meta_buf) {
3002 mMetadataChannel->bufDone(metadata_buf);
3003 free(metadata_buf);
3004 }
3005 return;
3006 }
3007
3008 //not in flush
3009 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3010 int32_t frame_number_valid, urgent_frame_number_valid;
3011 uint32_t frame_number, urgent_frame_number;
3012 int64_t capture_time;
3013 nsecs_t currentSysTime;
3014
3015 int32_t *p_frame_number_valid =
3016 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3017 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3018 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3019 int32_t *p_urgent_frame_number_valid =
3020 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3021 uint32_t *p_urgent_frame_number =
3022 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3023 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3024 metadata) {
3025 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3026 *p_frame_number_valid, *p_frame_number);
3027 }
3028
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003029 camera_metadata_t *resultMetadata = nullptr;
3030
Thierry Strudel3d639192016-09-09 11:52:26 -07003031 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3032 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3033 LOGE("Invalid metadata");
3034 if (free_and_bufdone_meta_buf) {
3035 mMetadataChannel->bufDone(metadata_buf);
3036 free(metadata_buf);
3037 }
3038 goto done_metadata;
3039 }
3040 frame_number_valid = *p_frame_number_valid;
3041 frame_number = *p_frame_number;
3042 capture_time = *p_capture_time;
3043 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3044 urgent_frame_number = *p_urgent_frame_number;
3045 currentSysTime = systemTime(CLOCK_MONOTONIC);
3046
3047 // Detect if buffers from any requests are overdue
3048 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003049 int64_t timeout;
3050 {
3051 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3052 // If there is a pending HDR+ request, the following requests may be blocked until the
3053 // HDR+ request is done. So allow a longer timeout.
3054 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3055 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
3056 }
3057
3058 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003059 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003060 assert(missed.stream->priv);
3061 if (missed.stream->priv) {
3062 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3063 assert(ch->mStreams[0]);
3064 if (ch->mStreams[0]) {
3065 LOGE("Cancel missing frame = %d, buffer = %p,"
3066 "stream type = %d, stream format = %d",
3067 req.frame_number, missed.buffer,
3068 ch->mStreams[0]->getMyType(), missed.stream->format);
3069 ch->timeoutFrame(req.frame_number);
3070 }
3071 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 }
3073 }
3074 }
3075 //Partial result on process_capture_result for timestamp
3076 if (urgent_frame_number_valid) {
3077 LOGD("valid urgent frame_number = %u, capture_time = %lld",
3078 urgent_frame_number, capture_time);
3079
3080 //Recieved an urgent Frame Number, handle it
3081 //using partial results
3082 for (pendingRequestIterator i =
3083 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3084 LOGD("Iterator Frame = %d urgent frame = %d",
3085 i->frame_number, urgent_frame_number);
3086
3087 if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
3088 (i->partial_result_cnt == 0)) {
3089 LOGE("Error: HAL missed urgent metadata for frame number %d",
3090 i->frame_number);
3091 }
3092
3093 if (i->frame_number == urgent_frame_number &&
3094 i->bUrgentReceived == 0) {
3095
3096 camera3_capture_result_t result;
3097 memset(&result, 0, sizeof(camera3_capture_result_t));
3098
3099 i->partial_result_cnt++;
3100 i->bUrgentReceived = 1;
3101 // Extract 3A metadata
3102 result.result =
3103 translateCbUrgentMetadataToResultMetadata(metadata);
3104 // Populate metadata result
3105 result.frame_number = urgent_frame_number;
3106 result.num_output_buffers = 0;
3107 result.output_buffers = NULL;
3108 result.partial_result = i->partial_result_cnt;
3109
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003110 if (mHdrPlusClient != nullptr) {
3111 // Notify HDR+ client about the partial metadata.
3112 mHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3113 result.partial_result == PARTIAL_RESULT_COUNT);
3114 }
3115
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003116 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07003117 LOGD("urgent frame_number = %u, capture_time = %lld",
3118 result.frame_number, capture_time);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003119 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3120 // Instant AEC settled for this frame.
3121 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3122 mInstantAECSettledFrameNumber = urgent_frame_number;
3123 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003124 free_camera_metadata((camera_metadata_t *)result.result);
3125 break;
3126 }
3127 }
3128 }
3129
3130 if (!frame_number_valid) {
3131 LOGD("Not a valid normal frame number, used as SOF only");
3132 if (free_and_bufdone_meta_buf) {
3133 mMetadataChannel->bufDone(metadata_buf);
3134 free(metadata_buf);
3135 }
3136 goto done_metadata;
3137 }
3138 LOGH("valid frame_number = %u, capture_time = %lld",
3139 frame_number, capture_time);
3140
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003141 // Check whether any stream buffer corresponding to this is dropped or not
3142 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3143 // OR check if instant AEC is enabled, then need to drop frames untill AEC is settled.
3144 for (auto & pendingRequest : mPendingRequestsList) {
3145 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3146 mInstantAECSettledFrameNumber)) {
3147 camera3_notify_msg_t notify_msg = {};
3148 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003149 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003150 QCamera3ProcessingChannel *channel =
3151 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003152 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003153 if (p_cam_frame_drop) {
3154 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003155 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003156 // Got the stream ID for drop frame.
3157 dropFrame = true;
3158 break;
3159 }
3160 }
3161 } else {
3162 // This is instant AEC case.
3163 // For instant AEC drop the stream untill AEC is settled.
3164 dropFrame = true;
3165 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003166
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003167 if (dropFrame) {
3168 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3169 if (p_cam_frame_drop) {
3170 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003171 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003172 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003173 } else {
3174 // For instant AEC, inform frame drop and frame number
3175 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3176 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003177 pendingRequest.frame_number, streamID,
3178 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003179 }
3180 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003181 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003182 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003183 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003184 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003185 if (p_cam_frame_drop) {
3186 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003187 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003188 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003189 } else {
3190 // For instant AEC, inform frame drop and frame number
3191 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3192 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003193 pendingRequest.frame_number, streamID,
3194 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003195 }
3196 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003197 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003198 PendingFrameDrop.stream_ID = streamID;
3199 // Add the Frame drop info to mPendingFrameDropList
3200 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003201 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003202 }
3203 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003204 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003205
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003206 for (auto & pendingRequest : mPendingRequestsList) {
3207 // Find the pending request with the frame number.
3208 if (pendingRequest.frame_number == frame_number) {
3209 // Update the sensor timestamp.
3210 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003211
Thierry Strudel3d639192016-09-09 11:52:26 -07003212
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003213 /* Set the timestamp in display metadata so that clients aware of
3214 private_handle such as VT can use this un-modified timestamps.
3215 Camera framework is unaware of this timestamp and cannot change this */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003216 updateTimeStampInPendingBuffers(pendingRequest.frame_number, pendingRequest.timestamp);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003217
Thierry Strudel3d639192016-09-09 11:52:26 -07003218 // Find channel requiring metadata, meaning internal offline postprocess
3219 // is needed.
3220 //TODO: for now, we don't support two streams requiring metadata at the same time.
3221 // (because we are not making copies, and metadata buffer is not reference counted.
3222 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003223 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3224 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003225 if (iter->need_metadata) {
3226 internalPproc = true;
3227 QCamera3ProcessingChannel *channel =
3228 (QCamera3ProcessingChannel *)iter->stream->priv;
3229 channel->queueReprocMetadata(metadata_buf);
3230 break;
3231 }
3232 }
3233
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003234 for (auto itr = pendingRequest.internalRequestList.begin();
3235 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003236 if (itr->need_metadata) {
3237 internalPproc = true;
3238 QCamera3ProcessingChannel *channel =
3239 (QCamera3ProcessingChannel *)itr->stream->priv;
3240 channel->queueReprocMetadata(metadata_buf);
3241 break;
3242 }
3243 }
3244
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003245 resultMetadata = translateFromHalMetadata(metadata,
3246 pendingRequest.timestamp, pendingRequest.request_id,
3247 pendingRequest.jpegMetadata, pendingRequest.pipeline_depth,
3248 pendingRequest.capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08003249 /* DevCamDebug metadata translateFromHalMetadata function call*/
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003250 pendingRequest.DevCamDebug_meta_enable,
Samuel Ha68ba5172016-12-15 18:41:12 -08003251 /* DevCamDebug metadata end */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003252 internalPproc, pendingRequest.fwkCacMode,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003253 firstMetadataInBatch);
Thierry Strudel3d639192016-09-09 11:52:26 -07003254
3255 saveExifParams(metadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003256 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003257
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003258 if (pendingRequest.blob_request) {
3259 //Dump tuning metadata if enabled and available
3260 char prop[PROPERTY_VALUE_MAX];
3261 memset(prop, 0, sizeof(prop));
3262 property_get("persist.camera.dumpmetadata", prop, "0");
3263 int32_t enabled = atoi(prop);
3264 if (enabled && metadata->is_tuning_params_valid) {
3265 dumpMetadataToFile(metadata->tuning_params,
3266 mMetaFrameCount,
3267 enabled,
3268 "Snapshot",
3269 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003270 }
3271 }
3272
3273 if (!internalPproc) {
3274 LOGD("couldn't find need_metadata for this metadata");
3275 // Return metadata buffer
3276 if (free_and_bufdone_meta_buf) {
3277 mMetadataChannel->bufDone(metadata_buf);
3278 free(metadata_buf);
3279 }
3280 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003281
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003282 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003283 }
3284 }
3285
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003286 // Try to send out shutter callbacks and capture results.
3287 handlePendingResultsWithLock(frame_number, resultMetadata);
3288 return;
3289
Thierry Strudel3d639192016-09-09 11:52:26 -07003290done_metadata:
3291 for (pendingRequestIterator i = mPendingRequestsList.begin();
3292 i != mPendingRequestsList.end() ;i++) {
3293 i->pipeline_depth++;
3294 }
3295 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3296 unblockRequestIfNecessary();
3297}
3298
3299/*===========================================================================
3300 * FUNCTION : hdrPlusPerfLock
3301 *
3302 * DESCRIPTION: perf lock for HDR+ using custom intent
3303 *
3304 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3305 *
3306 * RETURN : None
3307 *
3308 *==========================================================================*/
3309void QCamera3HardwareInterface::hdrPlusPerfLock(
3310 mm_camera_super_buf_t *metadata_buf)
3311{
3312 if (NULL == metadata_buf) {
3313 LOGE("metadata_buf is NULL");
3314 return;
3315 }
3316 metadata_buffer_t *metadata =
3317 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3318 int32_t *p_frame_number_valid =
3319 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3320 uint32_t *p_frame_number =
3321 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3322
3323 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3324 LOGE("%s: Invalid metadata", __func__);
3325 return;
3326 }
3327
3328 //acquire perf lock for 5 sec after the last HDR frame is captured
3329 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3330 if ((p_frame_number != NULL) &&
3331 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003332 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07003333 }
3334 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003335}
3336
/*===========================================================================
 * FUNCTION   : handleInputBufferWithLock
 *
 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
 *              Looks up the pending reprocess request for this frame, emits a
 *              shutter notify (timestamp taken from the input settings when
 *              present), waits on the input buffer's release fence, then sends
 *              the capture result and erases the pending request.
 *
 * PARAMETERS : @frame_number: frame number of the input buffer
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
    // Linear search for the pending request with this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    // Only handle requests that actually carry an input buffer (reprocess).
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            // Fallback timestamp if the input settings carry none.
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    // Reprocess shutter reuses the original capture timestamp.
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            orchestrateNotify(&notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait for (and close) the input buffer's release fence before
        // returning the buffer to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Reprocess result: metadata equals the request settings, no output
        // buffers are attached here, and the result is final
        // (PARTIAL_RESULT_COUNT).
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        orchestrateResult(&result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        // Request fully served; drop it from the pending list.
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3404
/*===========================================================================
 * FUNCTION   : handleBufferWithLock
 *
 * DESCRIPTION: Handles image buffer callback with mMutex lock held. Either
 *              returns the buffer straight to the framework (no matching
 *              pending request), or caches it on the pending request until
 *              its result metadata arrives.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *              @frame_number: frame number of the image buffer
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);

    // A returned BLOB (JPEG) buffer means the snapshot is done; release the
    // snapshot perf lock acquired for it.
    if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
        mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
    }

    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        // Flush in progress: buffers take a dedicated return path.
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            // Out-of-order delivery for live requests is unexpected; log only.
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        // Buffer-only result: no metadata, single output buffer.
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        result.partial_result = 0;
        // If this (stream, frame) pair was flagged as dropped, mark the
        // buffer as ERROR and consume the drop entry.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        // Merge any error status recorded for this buffer handle.
        buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        orchestrateResult(&result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: settle the input buffer's release fence first.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
        }

        // Put buffer into the pending request
        for (auto &requestedBuffer : i->buffers) {
            if (requestedBuffer.stream == buffer->stream) {
                if (requestedBuffer.buffer != nullptr) {
                    LOGE("Error: buffer is already set");
                } else {
                    // Cache a heap copy; freed when the result is sent out in
                    // handlePendingResultsWithLock().
                    requestedBuffer.buffer = (camera3_stream_buffer_t *)malloc(
                        sizeof(camera3_stream_buffer_t));
                    *(requestedBuffer.buffer) = *buffer;
                    LOGH("cache buffer %p at result frame_number %u",
                        buffer->buffer, frame_number);
                }
            }
        }

        if (i->input_buffer) {
            // For a reprocessing request, try to send out shutter callback and result metadata.
            handlePendingResultsWithLock(frame_number, nullptr);
        }
    }

    // First preview buffer out: drop the startup perf locks and switch to the
    // steady-state power hint.
    if (mPreviewStarted == false) {
        QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
        if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
            mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
            mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
            mPreviewStarted = true;

            // Set power hint for preview
            mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
        }
    }
}
3520
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003521void QCamera3HardwareInterface::handlePendingResultsWithLock(uint32_t frameNumber,
3522 const camera_metadata_t *resultMetadata)
3523{
3524 // Find the pending request for this result metadata.
3525 auto requestIter = mPendingRequestsList.begin();
3526 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
3527 requestIter++;
3528 }
3529
3530 if (requestIter == mPendingRequestsList.end()) {
3531 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
3532 return;
3533 }
3534
3535 // Update the result metadata
3536 requestIter->resultMetadata = resultMetadata;
3537
3538 // Check what type of request this is.
3539 bool liveRequest = false;
3540 if (requestIter->hdrplus) {
3541 // HDR+ request doesn't have partial results.
3542 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3543 } else if (requestIter->input_buffer != nullptr) {
3544 // Reprocessing request result is the same as settings.
3545 requestIter->resultMetadata = requestIter->settings;
3546 // Reprocessing request doesn't have partial results.
3547 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
3548 } else {
3549 liveRequest = true;
3550 requestIter->partial_result_cnt++;
3551 mPendingLiveRequest--;
3552
3553 // For a live request, send the metadata to HDR+ client.
3554 if (mHdrPlusClient != nullptr) {
3555 mHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
3556 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
3557 }
3558 }
3559
3560 // The pending requests are ordered by increasing frame numbers. The shutter callback and
3561 // result metadata are ready to be sent if all previous pending requests are ready to be sent.
3562 bool readyToSend = true;
3563
3564 // Iterate through the pending requests to send out shutter callbacks and results that are
3565 // ready. Also if this result metadata belongs to a live request, notify errors for previous
3566 // live requests that don't have result metadata yet.
3567 auto iter = mPendingRequestsList.begin();
3568 while (iter != mPendingRequestsList.end()) {
3569 // Check if current pending request is ready. If it's not ready, the following pending
3570 // requests are also not ready.
3571 if (readyToSend && iter->resultMetadata == nullptr) {
3572 readyToSend = false;
3573 }
3574
3575 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
3576
3577 std::vector<camera3_stream_buffer_t> outputBuffers;
3578
3579 camera3_capture_result_t result = {};
3580 result.frame_number = iter->frame_number;
3581 result.result = iter->resultMetadata;
3582 result.partial_result = iter->partial_result_cnt;
3583
3584 // If this pending buffer has result metadata, we may be able to send out shutter callback
3585 // and result metadata.
3586 if (iter->resultMetadata != nullptr) {
3587 if (!readyToSend) {
3588 // If any of the previous pending request is not ready, this pending request is
3589 // also not ready to send in order to keep shutter callbacks and result metadata
3590 // in order.
3591 iter++;
3592 continue;
3593 }
3594
3595 // Invoke shutter callback if not yet.
3596 if (!iter->shutter_notified) {
3597 int64_t timestamp = systemTime(CLOCK_MONOTONIC);
3598
3599 // Find the timestamp in HDR+ result metadata
3600 camera_metadata_ro_entry_t entry;
3601 status_t res = find_camera_metadata_ro_entry(iter->resultMetadata,
3602 ANDROID_SENSOR_TIMESTAMP, &entry);
3603 if (res != OK) {
3604 ALOGE("%s: Cannot find sensor timestamp for frame number %d: %s (%d)",
3605 __FUNCTION__, iter->frame_number, strerror(-res), res);
3606 } else {
3607 timestamp = entry.data.i64[0];
3608 }
3609
3610 camera3_notify_msg_t notify_msg = {};
3611 notify_msg.type = CAMERA3_MSG_SHUTTER;
3612 notify_msg.message.shutter.frame_number = iter->frame_number;
3613 notify_msg.message.shutter.timestamp = timestamp;
3614 orchestrateNotify(&notify_msg);
3615 iter->shutter_notified = true;
3616 }
3617
3618 result.input_buffer = iter->input_buffer;
3619
3620 // Prepare output buffer array
3621 for (auto bufferInfoIter = iter->buffers.begin();
3622 bufferInfoIter != iter->buffers.end(); bufferInfoIter++) {
3623 if (bufferInfoIter->buffer != nullptr) {
3624
3625 QCamera3Channel *channel =
3626 (QCamera3Channel *)bufferInfoIter->buffer->stream->priv;
3627 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
3628
3629 // Check if this buffer is a dropped frame.
3630 auto frameDropIter = mPendingFrameDropList.begin();
3631 while (frameDropIter != mPendingFrameDropList.end()) {
3632 if((frameDropIter->stream_ID == streamID) &&
3633 (frameDropIter->frame_number == frameNumber)) {
3634 bufferInfoIter->buffer->status = CAMERA3_BUFFER_STATUS_ERROR;
3635 LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u", frameNumber,
3636 streamID);
3637 mPendingFrameDropList.erase(frameDropIter);
3638 break;
3639 } else {
3640 frameDropIter++;
3641 }
3642 }
3643
3644 // Check buffer error status
3645 bufferInfoIter->buffer->status |= mPendingBuffersMap.getBufErrStatus(
3646 bufferInfoIter->buffer->buffer);
3647 mPendingBuffersMap.removeBuf(bufferInfoIter->buffer->buffer);
3648
3649 outputBuffers.push_back(*(bufferInfoIter->buffer));
3650 free(bufferInfoIter->buffer);
3651 bufferInfoIter->buffer = NULL;
3652 }
3653 }
3654
3655 result.output_buffers = outputBuffers.size() > 0 ? &outputBuffers[0] : nullptr;
3656 result.num_output_buffers = outputBuffers.size();
3657 } else if (iter->frame_number < frameNumber && liveRequest && thisLiveRequest) {
3658 // If the result metadata belongs to a live request, notify errors for previous pending
3659 // live requests.
3660 mPendingLiveRequest--;
3661
3662 CameraMetadata dummyMetadata;
3663 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
3664 result.result = dummyMetadata.release();
3665
3666 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
3667 } else {
3668 iter++;
3669 continue;
3670 }
3671
3672 orchestrateResult(&result);
3673
3674 // For reprocessing, result metadata is the same as settings so do not free it here to
3675 // avoid double free.
3676 if (result.result != iter->settings) {
3677 free_camera_metadata((camera_metadata_t *)result.result);
3678 }
3679 iter->resultMetadata = nullptr;
3680 iter = erasePendingRequest(iter);
3681 }
3682
3683 if (liveRequest) {
3684 for (auto &iter : mPendingRequestsList) {
3685 // Increment pipeline depth for the following pending requests.
3686 if (iter.frame_number > frameNumber) {
3687 iter.pipeline_depth++;
3688 }
3689 }
3690 }
3691
3692 unblockRequestIfNecessary();
3693}
3694
/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
 *
 * PARAMETERS :
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
   // Unblock process_capture_request
   // Wake any thread waiting on mRequestCond (caller already holds mMutex).
   pthread_cond_signal(&mRequestCond);
}
3711
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003712/*===========================================================================
3713 * FUNCTION : isHdrSnapshotRequest
3714 *
3715 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
3716 *
3717 * PARAMETERS : camera3 request structure
3718 *
3719 * RETURN : boolean decision variable
3720 *
3721 *==========================================================================*/
3722bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
3723{
3724 if (request == NULL) {
3725 LOGE("Invalid request handle");
3726 assert(0);
3727 return false;
3728 }
3729
3730 if (!mForceHdrSnapshot) {
3731 CameraMetadata frame_settings;
3732 frame_settings = request->settings;
3733
3734 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
3735 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
3736 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
3737 return false;
3738 }
3739 } else {
3740 return false;
3741 }
3742
3743 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
3744 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
3745 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
3746 return false;
3747 }
3748 } else {
3749 return false;
3750 }
3751 }
3752
3753 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
3754 if (request->output_buffers[i].stream->format
3755 == HAL_PIXEL_FORMAT_BLOB) {
3756 return true;
3757 }
3758 }
3759
3760 return false;
3761}
/*===========================================================================
 * FUNCTION   : orchestrateRequest
 *
 * DESCRIPTION: Orchestrates a capture request from camera service. For an HDR
 *              snapshot it expands the single framework request into a
 *              bracketed sequence of internal requests (settling + -2x, 0x and
 *              +2x exposures) mapped through _orchestrationDb; otherwise it
 *              forwards the request with an internal frame number.
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     : Error status codes
 *
 *==========================================================================*/
int32_t QCamera3HardwareInterface::orchestrateRequest(
        camera3_capture_request_t *request)
{

    // Saved so the framework-visible fields can be restored after the
    // request struct is mutated for each internal capture below.
    uint32_t originalFrameNumber = request->frame_number;
    uint32_t originalOutputCount = request->num_output_buffers;
    const camera_metadata_t *original_settings = request->settings;
    List<InternalRequest> internallyRequestedStreams;
    List<InternalRequest> emptyInternalList;

    if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
        LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
        uint32_t internalFrameNumber;
        CameraMetadata modified_meta;


        /* Add Blob channel to list of internally requested streams */
        for (uint32_t i = 0; i < request->num_output_buffers; i++) {
            if (request->output_buffers[i].stream->format
                    == HAL_PIXEL_FORMAT_BLOB) {
                InternalRequest streamRequested;
                streamRequested.meteringOnly = 1;
                streamRequested.need_metadata = 0;
                streamRequested.stream = request->output_buffers[i].stream;
                internallyRequestedStreams.push_back(streamRequested);
            }
        }
        request->num_output_buffers = 0;
        auto itr =  internallyRequestedStreams.begin();

        /* Modify setting to set compensation */
        modified_meta = request->settings;
        int32_t expCompensation = GB_HDR_HALF_STEP_EV;
        uint8_t aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        // NOTE(review): each modified_meta.release() below transfers ownership
        // to request->settings; none of these buffers is explicitly freed in
        // this function — confirm downstream ownership or potential leak.
        camera_metadata_t *modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & -2x frame */
        // Metering-only internal capture: no framework frame number mapping.
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // Real framework-visible capture: map original -> internal frame number
        // and restore the original output buffers for this one request.
        request->num_output_buffers = originalOutputCount;
        _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, emptyInternalList);
        request->num_output_buffers = 0;

        // Re-acquire ownership of the settings to adjust exposure to 0x.
        modified_meta = modified_settings;
        expCompensation = 0;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        /* Capture Settling & 0X frame */

        // First a metering-only settling capture ...
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            LOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ... then the actual 0x capture, with metadata for offline reprocess.
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        /* Capture 2X frame*/
        modified_meta = modified_settings;
        expCompensation = GB_HDR_2X_STEP_EV;
        aeLock = 1;
        modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
        modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
        modified_settings = modified_meta.release();
        request->settings = modified_settings;

        // Settling capture at +2x (metering only) ...
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 0;
            itr->meteringOnly = 1;
        }
        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);

        // ... then the actual +2x capture with metadata.
        itr =  internallyRequestedStreams.begin();
        if (itr == internallyRequestedStreams.end()) {
            ALOGE("Error Internally Requested Stream list is empty");
            assert(0);
        } else {
            itr->need_metadata = 1;
            itr->meteringOnly = 0;
        }

        _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
        request->frame_number = internalFrameNumber;
        processCaptureRequest(request, internallyRequestedStreams);


        /* Capture 2X on original streaming config*/
        internallyRequestedStreams.clear();

        /* Restore original settings pointer */
        request->settings = original_settings;
    } else {
        uint32_t internalFrameNumber;
        _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
        request->frame_number = internalFrameNumber;
        return processCaptureRequest(request, internallyRequestedStreams);
    }

    return NO_ERROR;
}
3908
3909/*===========================================================================
3910 * FUNCTION : orchestrateResult
3911 *
3912 * DESCRIPTION: Orchestrates a capture result to camera service
3913 *
3914 * PARAMETERS :
3915 * @request : request from framework to process
3916 *
3917 * RETURN :
3918 *
3919 *==========================================================================*/
3920void QCamera3HardwareInterface::orchestrateResult(
3921 camera3_capture_result_t *result)
3922{
3923 uint32_t frameworkFrameNumber;
3924 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
3925 frameworkFrameNumber);
3926 if (rc != NO_ERROR) {
3927 LOGE("Cannot find translated frameworkFrameNumber");
3928 assert(0);
3929 } else {
3930 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3931 LOGD("CAM_DEBUG Internal Request drop the result");
3932 } else {
3933 result->frame_number = frameworkFrameNumber;
3934 mCallbackOps->process_capture_result(mCallbackOps, result);
3935 }
3936 }
3937}
3938
3939/*===========================================================================
3940 * FUNCTION : orchestrateNotify
3941 *
3942 * DESCRIPTION: Orchestrates a notify to camera service
3943 *
3944 * PARAMETERS :
 * @notify_msg : notify message to be translated and sent to camera service
3946 *
3947 * RETURN :
3948 *
3949 *==========================================================================*/
3950void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
3951{
3952 uint32_t frameworkFrameNumber;
3953 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
3954 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
3955 frameworkFrameNumber);
3956 if (rc != NO_ERROR) {
3957 LOGE("Cannot find translated frameworkFrameNumber");
3958 assert(0);
3959 } else {
3960 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
3961 LOGE("CAM_DEBUG Internal Request drop the notifyCb");
3962 } else {
3963 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
3964 mCallbackOps->notify(mCallbackOps, notify_msg);
3965 }
3966 }
3967}
3968
3969/*===========================================================================
3970 * FUNCTION : FrameNumberRegistry
3971 *
3972 * DESCRIPTION: Constructor
3973 *
3974 * PARAMETERS :
3975 *
3976 * RETURN :
3977 *
3978 *==========================================================================*/
3979FrameNumberRegistry::FrameNumberRegistry()
3980{
3981 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
3982}
3983
3984/*===========================================================================
3985 * FUNCTION : ~FrameNumberRegistry
3986 *
3987 * DESCRIPTION: Destructor
3988 *
3989 * PARAMETERS :
3990 *
3991 * RETURN :
3992 *
3993 *==========================================================================*/
FrameNumberRegistry::~FrameNumberRegistry()
{
    // Nothing to release explicitly: the frame-number map cleans itself up.
}
3997
3998/*===========================================================================
3999 * FUNCTION : PurgeOldEntriesLocked
4000 *
 * DESCRIPTION: Maintenance function to trigger LRU cleanup mechanism
4002 *
4003 * PARAMETERS :
4004 *
4005 * RETURN : NONE
4006 *
4007 *==========================================================================*/
4008void FrameNumberRegistry::purgeOldEntriesLocked()
4009{
4010 while (_register.begin() != _register.end()) {
4011 auto itr = _register.begin();
4012 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4013 _register.erase(itr);
4014 } else {
4015 return;
4016 }
4017 }
4018}
4019
4020/*===========================================================================
4021 * FUNCTION : allocStoreInternalFrameNumber
4022 *
4023 * DESCRIPTION: Method to note down a framework request and associate a new
4024 * internal request number against it
4025 *
4026 * PARAMETERS :
4027 * @fFrameNumber: Identifier given by framework
4028 * @internalFN : Output parameter which will have the newly generated internal
4029 * entry
4030 *
4031 * RETURN : Error code
4032 *
4033 *==========================================================================*/
4034int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4035 uint32_t &internalFrameNumber)
4036{
4037 Mutex::Autolock lock(mRegistryLock);
4038 internalFrameNumber = _nextFreeInternalNumber++;
4039 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4040 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4041 purgeOldEntriesLocked();
4042 return NO_ERROR;
4043}
4044
4045/*===========================================================================
4046 * FUNCTION : generateStoreInternalFrameNumber
4047 *
4048 * DESCRIPTION: Method to associate a new internal request number independent
 * of any association with framework requests
4050 *
4051 * PARAMETERS :
 * @internalFrame#: Output parameter which will have the newly generated internal
 *                  frame number
4053 *
4054 *
4055 * RETURN : Error code
4056 *
4057 *==========================================================================*/
4058int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4059{
4060 Mutex::Autolock lock(mRegistryLock);
4061 internalFrameNumber = _nextFreeInternalNumber++;
4062 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4063 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4064 purgeOldEntriesLocked();
4065 return NO_ERROR;
4066}
4067
4068/*===========================================================================
4069 * FUNCTION : getFrameworkFrameNumber
4070 *
4071 * DESCRIPTION: Method to query the framework framenumber given an internal #
4072 *
4073 * PARAMETERS :
4074 * @internalFrame#: Internal reference
4075 * @frameworkframenumber: Output parameter holding framework frame entry
4076 *
4077 * RETURN : Error code
4078 *
4079 *==========================================================================*/
4080int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4081 uint32_t &frameworkFrameNumber)
4082{
4083 Mutex::Autolock lock(mRegistryLock);
4084 auto itr = _register.find(internalFrameNumber);
4085 if (itr == _register.end()) {
4086 LOGE("CAM_DEBUG: Cannot find internal#: %d", internalFrameNumber);
4087 return -ENOENT;
4088 }
4089
4090 frameworkFrameNumber = itr->second;
4091 purgeOldEntriesLocked();
4092 return NO_ERROR;
4093}
Thierry Strudel3d639192016-09-09 11:52:26 -07004094
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004095status_t QCamera3HardwareInterface::fillPbStreamConfig(
4096 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4097 QCamera3Channel *channel, uint32_t streamIndex) {
4098 if (config == nullptr) {
4099 LOGE("%s: config is null", __FUNCTION__);
4100 return BAD_VALUE;
4101 }
4102
4103 if (channel == nullptr) {
4104 LOGE("%s: channel is null", __FUNCTION__);
4105 return BAD_VALUE;
4106 }
4107
4108 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4109 if (stream == nullptr) {
4110 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4111 return NAME_NOT_FOUND;
4112 }
4113
4114 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4115 if (streamInfo == nullptr) {
4116 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4117 return NAME_NOT_FOUND;
4118 }
4119
4120 config->id = pbStreamId;
4121 config->image.width = streamInfo->dim.width;
4122 config->image.height = streamInfo->dim.height;
4123 config->image.padding = 0;
4124 config->image.format = pbStreamFormat;
4125
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004126 uint32_t totalPlaneSize = 0;
4127
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004128 // Fill plane information.
4129 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4130 pbcamera::PlaneConfiguration plane;
4131 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4132 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4133 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004134
4135 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004136 }
4137
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004138 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004139 return OK;
4140}
4141
Thierry Strudel3d639192016-09-09 11:52:26 -07004142/*===========================================================================
4143 * FUNCTION : processCaptureRequest
4144 *
4145 * DESCRIPTION: process a capture request from camera service
4146 *
4147 * PARAMETERS :
4148 * @request : request from framework to process
4149 *
4150 * RETURN :
4151 *
4152 *==========================================================================*/
4153int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004154 camera3_capture_request_t *request,
4155 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004156{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004157 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004158 int rc = NO_ERROR;
4159 int32_t request_id;
4160 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004161 bool isVidBufRequested = false;
4162 camera3_stream_buffer_t *pInputBuffer = NULL;
4163
4164 pthread_mutex_lock(&mMutex);
4165
4166 // Validate current state
4167 switch (mState) {
4168 case CONFIGURED:
4169 case STARTED:
4170 /* valid state */
4171 break;
4172
4173 case ERROR:
4174 pthread_mutex_unlock(&mMutex);
4175 handleCameraDeviceError();
4176 return -ENODEV;
4177
4178 default:
4179 LOGE("Invalid state %d", mState);
4180 pthread_mutex_unlock(&mMutex);
4181 return -ENODEV;
4182 }
4183
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004184 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004185 if (rc != NO_ERROR) {
4186 LOGE("incoming request is not valid");
4187 pthread_mutex_unlock(&mMutex);
4188 return rc;
4189 }
4190
4191 meta = request->settings;
4192
4193 // For first capture request, send capture intent, and
4194 // stream on all streams
4195 if (mState == CONFIGURED) {
4196 // send an unconfigure to the backend so that the isp
4197 // resources are deallocated
4198 if (!mFirstConfiguration) {
4199 cam_stream_size_info_t stream_config_info;
4200 int32_t hal_version = CAM_HAL_V3;
4201 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4202 stream_config_info.buffer_info.min_buffers =
4203 MIN_INFLIGHT_REQUESTS;
4204 stream_config_info.buffer_info.max_buffers =
4205 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
4206 clear_metadata_buffer(mParameters);
4207 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4208 CAM_INTF_PARM_HAL_VERSION, hal_version);
4209 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4210 CAM_INTF_META_STREAM_INFO, stream_config_info);
4211 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4212 mParameters);
4213 if (rc < 0) {
4214 LOGE("set_parms for unconfigure failed");
4215 pthread_mutex_unlock(&mMutex);
4216 return rc;
4217 }
4218 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004219 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004220 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004221 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004222 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004223 property_get("persist.camera.is_type", is_type_value, "4");
4224 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4225 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4226 property_get("persist.camera.is_type_preview", is_type_value, "4");
4227 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4228 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004229
4230 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4231 int32_t hal_version = CAM_HAL_V3;
4232 uint8_t captureIntent =
4233 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4234 mCaptureIntent = captureIntent;
4235 clear_metadata_buffer(mParameters);
4236 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4237 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4238 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004239 if (mFirstConfiguration) {
4240 // configure instant AEC
4241 // Instant AEC is a session based parameter and it is needed only
4242 // once per complete session after open camera.
4243 // i.e. This is set only once for the first capture request, after open camera.
4244 setInstantAEC(meta);
4245 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004246 uint8_t fwkVideoStabMode=0;
4247 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4248 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4249 }
4250
4251 // If EIS setprop is enabled & if first capture setting has EIS enabled then only
4252 // turn it on for video/preview
4253 bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
4254 (isTypeVideo >= IS_TYPE_EIS_2_0);
Thierry Strudel3d639192016-09-09 11:52:26 -07004255 int32_t vsMode;
4256 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4257 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4258 rc = BAD_VALUE;
4259 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004260 LOGD("setEis %d", setEis);
4261 bool eis3Supported = false;
4262 size_t count = IS_TYPE_MAX;
4263 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4264 for (size_t i = 0; i < count; i++) {
4265 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4266 eis3Supported = true;
4267 break;
4268 }
4269 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004270
4271 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004272 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004273 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4274 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004275 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4276 is_type = isTypePreview;
4277 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4278 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4279 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004280 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004281 } else {
4282 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004283 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004284 } else {
4285 is_type = IS_TYPE_NONE;
4286 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004287 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004288 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004289 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4290 }
4291 }
4292
4293 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4294 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4295
4296 int32_t tintless_value = 1;
4297 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4298 CAM_INTF_PARM_TINTLESS, tintless_value);
4299 //Disable CDS for HFR mode or if DIS/EIS is on.
4300 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4301 //after every configure_stream
4302 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4303 (m_bIsVideo)) {
4304 int32_t cds = CAM_CDS_MODE_OFF;
4305 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4306 CAM_INTF_PARM_CDS_MODE, cds))
4307 LOGE("Failed to disable CDS for HFR mode");
4308
4309 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004310
4311 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
4312 uint8_t* use_av_timer = NULL;
4313
4314 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004315 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004316 use_av_timer = &m_debug_avtimer;
4317 }
4318 else{
4319 use_av_timer =
4320 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004321 if (use_av_timer) {
4322 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
4323 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004324 }
4325
4326 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
4327 rc = BAD_VALUE;
4328 }
4329 }
4330
Thierry Strudel3d639192016-09-09 11:52:26 -07004331 setMobicat();
4332
4333 /* Set fps and hfr mode while sending meta stream info so that sensor
4334 * can configure appropriate streaming mode */
4335 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004336 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
4337 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004338 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
4339 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004340 if (rc == NO_ERROR) {
4341 int32_t max_fps =
4342 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07004343 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004344 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
4345 }
4346 /* For HFR, more buffers are dequeued upfront to improve the performance */
4347 if (mBatchSize) {
4348 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
4349 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
4350 }
4351 }
4352 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004353 LOGE("setHalFpsRange failed");
4354 }
4355 }
4356 if (meta.exists(ANDROID_CONTROL_MODE)) {
4357 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
4358 rc = extractSceneMode(meta, metaMode, mParameters);
4359 if (rc != NO_ERROR) {
4360 LOGE("extractSceneMode failed");
4361 }
4362 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004363 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07004364
Thierry Strudel04e026f2016-10-10 11:27:36 -07004365 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
4366 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
4367 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
4368 rc = setVideoHdrMode(mParameters, vhdr);
4369 if (rc != NO_ERROR) {
4370 LOGE("setVideoHDR is failed");
4371 }
4372 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004373
Thierry Strudel3d639192016-09-09 11:52:26 -07004374 //TODO: validate the arguments, HSV scenemode should have only the
4375 //advertised fps ranges
4376
4377 /*set the capture intent, hal version, tintless, stream info,
4378 *and disenable parameters to the backend*/
4379 LOGD("set_parms META_STREAM_INFO " );
4380 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4381 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004382 "Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07004383 mStreamConfigInfo.type[i],
4384 mStreamConfigInfo.stream_sizes[i].width,
4385 mStreamConfigInfo.stream_sizes[i].height,
4386 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004387 mStreamConfigInfo.format[i],
4388 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07004389 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004390
Thierry Strudel3d639192016-09-09 11:52:26 -07004391 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4392 mParameters);
4393 if (rc < 0) {
4394 LOGE("set_parms failed for hal version, stream info");
4395 }
4396
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004397 cam_sensor_mode_info_t sensor_mode_info;
4398 memset(&sensor_mode_info, 0, sizeof(sensor_mode_info));
4399 rc = getSensorModeInfo(sensor_mode_info);
Thierry Strudel3d639192016-09-09 11:52:26 -07004400 if (rc != NO_ERROR) {
4401 LOGE("Failed to get sensor output size");
4402 pthread_mutex_unlock(&mMutex);
4403 goto error_exit;
4404 }
4405
4406 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
4407 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004408 sensor_mode_info.active_array_size.width,
4409 sensor_mode_info.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07004410
4411 /* Set batchmode before initializing channel. Since registerBuffer
4412 * internally initializes some of the channels, better set batchmode
4413 * even before first register buffer */
4414 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4415 it != mStreamInfo.end(); it++) {
4416 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4417 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
4418 && mBatchSize) {
4419 rc = channel->setBatchSize(mBatchSize);
4420 //Disable per frame map unmap for HFR/batchmode case
4421 rc |= channel->setPerFrameMapUnmap(false);
4422 if (NO_ERROR != rc) {
4423 LOGE("Channel init failed %d", rc);
4424 pthread_mutex_unlock(&mMutex);
4425 goto error_exit;
4426 }
4427 }
4428 }
4429
4430 //First initialize all streams
4431 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4432 it != mStreamInfo.end(); it++) {
4433 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4434 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
4435 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004436 setEis) {
4437 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4438 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
4439 is_type = mStreamConfigInfo.is_type[i];
4440 break;
4441 }
4442 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004443 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004444 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004445 rc = channel->initialize(IS_TYPE_NONE);
4446 }
4447 if (NO_ERROR != rc) {
4448 LOGE("Channel initialization failed %d", rc);
4449 pthread_mutex_unlock(&mMutex);
4450 goto error_exit;
4451 }
4452 }
4453
4454 if (mRawDumpChannel) {
4455 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
4456 if (rc != NO_ERROR) {
4457 LOGE("Error: Raw Dump Channel init failed");
4458 pthread_mutex_unlock(&mMutex);
4459 goto error_exit;
4460 }
4461 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004462 if (mHdrPlusRawSrcChannel) {
4463 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
4464 if (rc != NO_ERROR) {
4465 LOGE("Error: HDR+ RAW Source Channel init failed");
4466 pthread_mutex_unlock(&mMutex);
4467 goto error_exit;
4468 }
4469 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004470 if (mSupportChannel) {
4471 rc = mSupportChannel->initialize(IS_TYPE_NONE);
4472 if (rc < 0) {
4473 LOGE("Support channel initialization failed");
4474 pthread_mutex_unlock(&mMutex);
4475 goto error_exit;
4476 }
4477 }
4478 if (mAnalysisChannel) {
4479 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
4480 if (rc < 0) {
4481 LOGE("Analysis channel initialization failed");
4482 pthread_mutex_unlock(&mMutex);
4483 goto error_exit;
4484 }
4485 }
4486 if (mDummyBatchChannel) {
4487 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
4488 if (rc < 0) {
4489 LOGE("mDummyBatchChannel setBatchSize failed");
4490 pthread_mutex_unlock(&mMutex);
4491 goto error_exit;
4492 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004493 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07004494 if (rc < 0) {
4495 LOGE("mDummyBatchChannel initialization failed");
4496 pthread_mutex_unlock(&mMutex);
4497 goto error_exit;
4498 }
4499 }
4500
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004501 if (mHdrPlusClient != nullptr) {
4502 pbcamera::InputConfiguration inputConfig;
4503 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
4504
4505 // Configure HDR+ client streams.
4506 // Get input config.
4507 if (mHdrPlusRawSrcChannel) {
4508 // HDR+ input buffers will be provided by HAL.
4509 rc = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
4510 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
4511 if (rc != OK) {
4512 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream.",
4513 __FUNCTION__);
4514 pthread_mutex_unlock(&mMutex);
4515 goto error_exit;
4516 }
4517
4518 inputConfig.isSensorInput = false;
4519 } else {
4520 // Sensor MIPI will send data to Easel.
4521 inputConfig.isSensorInput = true;
4522 inputConfig.sensorMode.pixelArrayWidth =
4523 sensor_mode_info.pixel_array_size.width;
4524 inputConfig.sensorMode.pixelArrayHeight =
4525 sensor_mode_info.pixel_array_size.height;
4526 inputConfig.sensorMode.activeArrayWidth =
4527 sensor_mode_info.active_array_size.width;
4528 inputConfig.sensorMode.activeArrayHeight =
4529 sensor_mode_info.active_array_size.height;
4530 inputConfig.sensorMode.outputPixelClkHz =
4531 sensor_mode_info.op_pixel_clk;
4532 }
4533
4534 // Get output configurations.
4535 // Easel may need to output RAW16 buffers if mRawChannel was created.
4536 if (mRawChannel != nullptr) {
4537 pbcamera::StreamConfiguration outputConfig;
4538 rc = fillPbStreamConfig(&outputConfig, kPbRaw16OutputStreamId,
4539 HAL_PIXEL_FORMAT_RAW16, mRawChannel, /*stream index*/0);
4540 if (rc != OK) {
4541 LOGE("%s: Failed to get fill stream config for raw stream.", __FUNCTION__);
4542 pthread_mutex_unlock(&mMutex);
4543 goto error_exit;
4544 }
4545 outputStreamConfigs.push_back(outputConfig);
4546 }
4547
4548 // Easel may need to output YUV output buffers if mPictureChannel was created.
4549 if (mPictureChannel != nullptr) {
4550 pbcamera::StreamConfiguration outputConfig;
4551 rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
4552 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
4553 if (rc != OK) {
4554 LOGE("%s: Failed to get fill stream config for YUV stream.", __FUNCTION__);
4555 pthread_mutex_unlock(&mMutex);
4556 goto error_exit;
4557 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004558
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004559 outputStreamConfigs.push_back(outputConfig);
4560 }
4561
4562 // TODO: consider other channels for YUV output buffers.
4563
4564 rc = mHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
4565 if (rc != OK) {
4566 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
4567 strerror(-rc), rc);
4568 pthread_mutex_unlock(&mMutex);
4569 goto error_exit;
4570 }
4571 }
4572
Thierry Strudel3d639192016-09-09 11:52:26 -07004573 // Set bundle info
4574 rc = setBundleInfo();
4575 if (rc < 0) {
4576 LOGE("setBundleInfo failed %d", rc);
4577 pthread_mutex_unlock(&mMutex);
4578 goto error_exit;
4579 }
4580
4581 //update settings from app here
4582 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
4583 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
4584 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
4585 }
4586 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
4587 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
4588 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
4589 }
4590 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
4591 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
4592 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
4593
4594 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
4595 (mLinkedCameraId != mCameraId) ) {
4596 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
4597 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004598 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004599 goto error_exit;
4600 }
4601 }
4602
4603 // add bundle related cameras
4604 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
4605 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004606 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
4607 &m_pDualCamCmdPtr->bundle_info;
4608 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004609 if (mIsDeviceLinked)
4610 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
4611 else
4612 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4613
4614 pthread_mutex_lock(&gCamLock);
4615
4616 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
4617 LOGE("Dualcam: Invalid Session Id ");
4618 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004619 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004620 goto error_exit;
4621 }
4622
4623 if (mIsMainCamera == 1) {
4624 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4625 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004626 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004627 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07004628 // related session id should be session id of linked session
4629 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4630 } else {
4631 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4632 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07004633 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004634 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07004635 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4636 }
4637 pthread_mutex_unlock(&gCamLock);
4638
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004639 rc = mCameraHandle->ops->set_dual_cam_cmd(
4640 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07004641 if (rc < 0) {
4642 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004643 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004644 goto error_exit;
4645 }
4646 }
4647
4648 //Then start them.
4649 LOGH("Start META Channel");
4650 rc = mMetadataChannel->start();
4651 if (rc < 0) {
4652 LOGE("META channel start failed");
4653 pthread_mutex_unlock(&mMutex);
4654 goto error_exit;
4655 }
4656
4657 if (mAnalysisChannel) {
4658 rc = mAnalysisChannel->start();
4659 if (rc < 0) {
4660 LOGE("Analysis channel start failed");
4661 mMetadataChannel->stop();
4662 pthread_mutex_unlock(&mMutex);
4663 goto error_exit;
4664 }
4665 }
4666
4667 if (mSupportChannel) {
4668 rc = mSupportChannel->start();
4669 if (rc < 0) {
4670 LOGE("Support channel start failed");
4671 mMetadataChannel->stop();
4672 /* Although support and analysis are mutually exclusive today
4673 adding it in anycase for future proofing */
4674 if (mAnalysisChannel) {
4675 mAnalysisChannel->stop();
4676 }
4677 pthread_mutex_unlock(&mMutex);
4678 goto error_exit;
4679 }
4680 }
4681 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4682 it != mStreamInfo.end(); it++) {
4683 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
4684 LOGH("Start Processing Channel mask=%d",
4685 channel->getStreamTypeMask());
4686 rc = channel->start();
4687 if (rc < 0) {
4688 LOGE("channel start failed");
4689 pthread_mutex_unlock(&mMutex);
4690 goto error_exit;
4691 }
4692 }
4693
4694 if (mRawDumpChannel) {
4695 LOGD("Starting raw dump stream");
4696 rc = mRawDumpChannel->start();
4697 if (rc != NO_ERROR) {
4698 LOGE("Error Starting Raw Dump Channel");
4699 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4700 it != mStreamInfo.end(); it++) {
4701 QCamera3Channel *channel =
4702 (QCamera3Channel *)(*it)->stream->priv;
4703 LOGH("Stopping Processing Channel mask=%d",
4704 channel->getStreamTypeMask());
4705 channel->stop();
4706 }
4707 if (mSupportChannel)
4708 mSupportChannel->stop();
4709 if (mAnalysisChannel) {
4710 mAnalysisChannel->stop();
4711 }
4712 mMetadataChannel->stop();
4713 pthread_mutex_unlock(&mMutex);
4714 goto error_exit;
4715 }
4716 }
4717
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004718 if (mHdrPlusRawSrcChannel) {
4719 LOGD("Starting HDR+ RAW stream");
4720 rc = mHdrPlusRawSrcChannel->start();
4721 if (rc != NO_ERROR) {
4722 LOGE("Error Starting HDR+ RAW Channel");
4723 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4724 it != mStreamInfo.end(); it++) {
4725 QCamera3Channel *channel =
4726 (QCamera3Channel *)(*it)->stream->priv;
4727 LOGH("Stopping Processing Channel mask=%d",
4728 channel->getStreamTypeMask());
4729 channel->stop();
4730 }
4731 if (mSupportChannel)
4732 mSupportChannel->stop();
4733 if (mAnalysisChannel) {
4734 mAnalysisChannel->stop();
4735 }
4736 if (mRawDumpChannel) {
4737 mRawDumpChannel->stop();
4738 }
4739 mMetadataChannel->stop();
4740 pthread_mutex_unlock(&mMutex);
4741 goto error_exit;
4742 }
4743 }
4744
Thierry Strudel3d639192016-09-09 11:52:26 -07004745 if (mChannelHandle) {
4746
4747 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4748 mChannelHandle);
4749 if (rc != NO_ERROR) {
4750 LOGE("start_channel failed %d", rc);
4751 pthread_mutex_unlock(&mMutex);
4752 goto error_exit;
4753 }
4754 }
4755
4756 goto no_error;
4757error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004758 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004759 return rc;
4760no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07004761 mWokenUpByDaemon = false;
4762 mPendingLiveRequest = 0;
4763 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07004764 }
4765
4766 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004767 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07004768
4769 if (mFlushPerf) {
4770 //we cannot accept any requests during flush
4771 LOGE("process_capture_request cannot proceed during flush");
4772 pthread_mutex_unlock(&mMutex);
4773 return NO_ERROR; //should return an error
4774 }
4775
4776 if (meta.exists(ANDROID_REQUEST_ID)) {
4777 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
4778 mCurrentRequestId = request_id;
4779 LOGD("Received request with id: %d", request_id);
4780 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
4781 LOGE("Unable to find request id field, \
4782 & no previous id available");
4783 pthread_mutex_unlock(&mMutex);
4784 return NAME_NOT_FOUND;
4785 } else {
4786 LOGD("Re-using old request id");
4787 request_id = mCurrentRequestId;
4788 }
4789
4790 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
4791 request->num_output_buffers,
4792 request->input_buffer,
4793 frameNumber);
4794 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004795 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07004796 int blob_request = 0;
4797 uint32_t snapshotStreamId = 0;
4798 for (size_t i = 0; i < request->num_output_buffers; i++) {
4799 const camera3_stream_buffer_t& output = request->output_buffers[i];
4800 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
4801
4802 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004803 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07004804 blob_request = 1;
4805 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
4806 }
4807
4808 if (output.acquire_fence != -1) {
4809 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
4810 close(output.acquire_fence);
4811 if (rc != OK) {
4812 LOGE("sync wait failed %d", rc);
4813 pthread_mutex_unlock(&mMutex);
4814 return rc;
4815 }
4816 }
4817
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004818 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004819 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07004820
4821 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4822 isVidBufRequested = true;
4823 }
4824 }
4825
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004826 //FIXME: Add checks to ensure to dups in validateCaptureRequest
4827 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
4828 itr++) {
4829 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
4830 streamsArray.stream_request[streamsArray.num_streams++].streamID =
4831 channel->getStreamID(channel->getStreamTypeMask());
4832
4833 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
4834 isVidBufRequested = true;
4835 }
4836 }
4837
Thierry Strudel3d639192016-09-09 11:52:26 -07004838 if (blob_request) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004839 KPI_ATRACE_CAMSCOPE_INT("SNAPSHOT", CAMSCOPE_HAL3_SNAPSHOT, 1);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004840 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004841 }
4842 if (blob_request && mRawDumpChannel) {
4843 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004844 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07004845 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004846 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07004847 }
4848
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004849 {
4850 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4851 // Request a RAW buffer if
4852 // 1. mHdrPlusRawSrcChannel is valid.
4853 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
4854 // 3. There is no pending HDR+ request.
4855 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
4856 mHdrPlusPendingRequests.size() == 0) {
4857 streamsArray.stream_request[streamsArray.num_streams].streamID =
4858 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
4859 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
4860 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004861 }
4862
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004863 //extract capture intent
4864 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4865 mCaptureIntent =
4866 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4867 }
4868
4869 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
4870 mCacMode =
4871 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
4872 }
4873
4874 bool hdrPlusRequest = false;
4875
4876 // Decide if this is an HDR+ capture request.
4877 if (mHdrPlusClient != nullptr &&
4878 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
4879 bool highQualityPostProcessing = true;
4880
4881 // Check noise reduction mode is high quality.
4882 if (!meta.exists(ANDROID_NOISE_REDUCTION_MODE) ||
4883 meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
4884 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
4885 highQualityPostProcessing = false;
4886 }
4887
4888 // Check edge mode is high quality.
4889 if (!meta.exists(ANDROID_EDGE_MODE) ||
4890 meta.find(ANDROID_EDGE_MODE).data.u8[0] !=
4891 ANDROID_EDGE_MODE_HIGH_QUALITY) {
4892 highQualityPostProcessing = false;
4893 }
4894
4895 // If all post processing is high quality, this still capture request is an HDR+ request.
4896 // TODO: support more than a single JPEG output buffer.
4897 if (highQualityPostProcessing && request->num_output_buffers == 1 &&
4898 request->output_buffers[0].stream->format == HAL_PIXEL_FORMAT_BLOB) {
4899 auto frame = std::make_shared<mm_camera_buf_def_t>();
4900
4901 // Get a YUV buffer from pic channel.
4902 QCamera3PicChannel *picChannel =
4903 (QCamera3PicChannel*)request->output_buffers[0].stream->priv;
4904 rc = picChannel->getYuvBufferForRequest(frame.get(), frameNumber);
4905 if (rc != OK) {
4906 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
4907 __FUNCTION__, strerror(-rc), rc);
4908 pthread_mutex_unlock(&mMutex);
4909 return rc;
4910 }
4911
4912 pbcamera::StreamBuffer buffer;
4913 buffer.streamId = kPbYuvOutputStreamId;
4914 buffer.data = frame->buffer;
4915 buffer.dataSize = frame->frame_len;
4916
4917 pbcamera::CaptureRequest pbRequest;
4918 pbRequest.id = frameNumber;
4919 pbRequest.outputBuffers.push_back(buffer);
4920
4921 // Submit an HDR+ capture request to HDR+ service.
4922 rc = mHdrPlusClient->submitCaptureRequest(&pbRequest);
4923 if (rc != OK) {
4924 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__,
4925 __LINE__, strerror(-rc), rc);
4926 }
4927
4928 hdrPlusRequest = true;
4929
4930 HdrPlusPendingRequest pendingHdrPlusRequest = {};
4931 pendingHdrPlusRequest.yuvBuffer = frame;
4932 pendingHdrPlusRequest.frameworkOutputBuffers.push_back(request->output_buffers[0]);
4933 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
4934 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
4935
4936 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
4937 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
4938
4939 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
4940 } else {
4941 ALOGD("%s: Fall back to non HDR+ capture request. high quality: %d, number of "
4942 "output buffers: %d", __FUNCTION__, highQualityPostProcessing,
4943 request->num_output_buffers);
4944 }
4945 }
4946
4947 if(request->input_buffer == NULL && !hdrPlusRequest) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004948 /* Parse the settings:
4949 * - For every request in NORMAL MODE
4950 * - For every request in HFR mode during preview only case
4951 * - For first request of every batch in HFR mode during video
4952 * recording. In batchmode the same settings except frame number is
4953 * repeated in each request of the batch.
4954 */
4955 if (!mBatchSize ||
4956 (mBatchSize && !isVidBufRequested) ||
4957 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004958 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 if (rc < 0) {
4960 LOGE("fail to set frame parameters");
4961 pthread_mutex_unlock(&mMutex);
4962 return rc;
4963 }
4964 }
        /* For batchMode HFR, setFrameParameters is not called for every
         * request. But only frame number of the latest request is parsed.
         * Keep track of first and last frame numbers in a batch so that
         * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadata */
4970 if (mBatchSize) {
4971 if (!mToBeQueuedVidBufs) {
4972 //start of the batch
4973 mFirstFrameNumberInBatch = request->frame_number;
4974 }
4975 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4976 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
4977 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004978 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07004979 return BAD_VALUE;
4980 }
4981 }
4982 if (mNeedSensorRestart) {
4983 /* Unlock the mutex as restartSensor waits on the channels to be
4984 * stopped, which in turn calls stream callback functions -
4985 * handleBufferWithLock and handleMetadataWithLock */
4986 pthread_mutex_unlock(&mMutex);
4987 rc = dynamicUpdateMetaStreamInfo();
4988 if (rc != NO_ERROR) {
4989 LOGE("Restarting the sensor failed");
4990 return BAD_VALUE;
4991 }
4992 mNeedSensorRestart = false;
4993 pthread_mutex_lock(&mMutex);
4994 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004995 if(mResetInstantAEC) {
4996 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4997 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
4998 mResetInstantAEC = false;
4999 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005000 } else if (!hdrPlusRequest) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005001
5002 if (request->input_buffer->acquire_fence != -1) {
5003 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5004 close(request->input_buffer->acquire_fence);
5005 if (rc != OK) {
5006 LOGE("input buffer sync wait failed %d", rc);
5007 pthread_mutex_unlock(&mMutex);
5008 return rc;
5009 }
5010 }
5011 }
5012
5013 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5014 mLastCustIntentFrmNum = frameNumber;
5015 }
5016 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005017 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005018 pendingRequestIterator latestRequest;
5019 pendingRequest.frame_number = frameNumber;
5020 pendingRequest.num_buffers = request->num_output_buffers;
5021 pendingRequest.request_id = request_id;
5022 pendingRequest.blob_request = blob_request;
5023 pendingRequest.timestamp = 0;
5024 pendingRequest.bUrgentReceived = 0;
5025 if (request->input_buffer) {
5026 pendingRequest.input_buffer =
5027 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5028 *(pendingRequest.input_buffer) = *(request->input_buffer);
5029 pInputBuffer = pendingRequest.input_buffer;
5030 } else {
5031 pendingRequest.input_buffer = NULL;
5032 pInputBuffer = NULL;
5033 }
5034
5035 pendingRequest.pipeline_depth = 0;
5036 pendingRequest.partial_result_cnt = 0;
5037 extractJpegMetadata(mCurJpegMeta, request);
5038 pendingRequest.jpegMetadata = mCurJpegMeta;
5039 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
5040 pendingRequest.shutter_notified = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005041 pendingRequest.capture_intent = mCaptureIntent;
Samuel Ha68ba5172016-12-15 18:41:12 -08005042 /* DevCamDebug metadata processCaptureRequest */
5043 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5044 mDevCamDebugMetaEnable =
5045 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5046 }
5047 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5048 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005049
5050 //extract CAC info
5051 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5052 mCacMode =
5053 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5054 }
5055 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005056 pendingRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005057
5058 PendingBuffersInRequest bufsForCurRequest;
5059 bufsForCurRequest.frame_number = frameNumber;
5060 // Mark current timestamp for the new request
5061 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005062 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005063
5064 for (size_t i = 0; i < request->num_output_buffers; i++) {
5065 RequestedBufferInfo requestedBuf;
5066 memset(&requestedBuf, 0, sizeof(requestedBuf));
5067 requestedBuf.stream = request->output_buffers[i].stream;
5068 requestedBuf.buffer = NULL;
5069 pendingRequest.buffers.push_back(requestedBuf);
5070
5071 // Add to buffer handle the pending buffers list
5072 PendingBufferInfo bufferInfo;
5073 bufferInfo.buffer = request->output_buffers[i].buffer;
5074 bufferInfo.stream = request->output_buffers[i].stream;
5075 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5076 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5077 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5078 frameNumber, bufferInfo.buffer,
5079 channel->getStreamTypeMask(), bufferInfo.stream->format);
5080 }
5081 // Add this request packet into mPendingBuffersMap
5082 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5083 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5084 mPendingBuffersMap.get_num_overall_buffers());
5085
5086 latestRequest = mPendingRequestsList.insert(
5087 mPendingRequestsList.end(), pendingRequest);
5088 if(mFlush) {
5089 LOGI("mFlush is true");
5090 pthread_mutex_unlock(&mMutex);
5091 return NO_ERROR;
5092 }
5093
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005094 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5095 // channel.
5096 if (!hdrPlusRequest) {
5097 int indexUsed;
5098 // Notify metadata channel we receive a request
5099 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005100
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005101 if(request->input_buffer != NULL){
5102 LOGD("Input request, frame_number %d", frameNumber);
5103 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5104 if (NO_ERROR != rc) {
5105 LOGE("fail to set reproc parameters");
5106 pthread_mutex_unlock(&mMutex);
5107 return rc;
5108 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005109 }
5110
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005111 // Call request on other streams
5112 uint32_t streams_need_metadata = 0;
5113 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5114 for (size_t i = 0; i < request->num_output_buffers; i++) {
5115 const camera3_stream_buffer_t& output = request->output_buffers[i];
5116 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5117
5118 if (channel == NULL) {
5119 LOGW("invalid channel pointer for stream");
5120 continue;
5121 }
5122
5123 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5124 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5125 output.buffer, request->input_buffer, frameNumber);
5126 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005127 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005128 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5129 if (rc < 0) {
5130 LOGE("Fail to request on picture channel");
5131 pthread_mutex_unlock(&mMutex);
5132 return rc;
5133 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005134 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005135 LOGD("snapshot request with buffer %p, frame_number %d",
5136 output.buffer, frameNumber);
5137 if (!request->settings) {
5138 rc = channel->request(output.buffer, frameNumber,
5139 NULL, mPrevParameters, indexUsed);
5140 } else {
5141 rc = channel->request(output.buffer, frameNumber,
5142 NULL, mParameters, indexUsed);
5143 }
5144 if (rc < 0) {
5145 LOGE("Fail to request on picture channel");
5146 pthread_mutex_unlock(&mMutex);
5147 return rc;
5148 }
5149
5150 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5151 uint32_t j = 0;
5152 for (j = 0; j < streamsArray.num_streams; j++) {
5153 if (streamsArray.stream_request[j].streamID == streamId) {
5154 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5155 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5156 else
5157 streamsArray.stream_request[j].buf_index = indexUsed;
5158 break;
5159 }
5160 }
5161 if (j == streamsArray.num_streams) {
5162 LOGE("Did not find matching stream to update index");
5163 assert(0);
5164 }
5165
5166 pendingBufferIter->need_metadata = true;
5167 streams_need_metadata++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005168 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005169 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5170 bool needMetadata = false;
5171 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5172 rc = yuvChannel->request(output.buffer, frameNumber,
5173 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5174 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005175 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005176 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005177 pthread_mutex_unlock(&mMutex);
5178 return rc;
5179 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005180
5181 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5182 uint32_t j = 0;
5183 for (j = 0; j < streamsArray.num_streams; j++) {
5184 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005185 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5186 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5187 else
5188 streamsArray.stream_request[j].buf_index = indexUsed;
5189 break;
5190 }
5191 }
5192 if (j == streamsArray.num_streams) {
5193 LOGE("Did not find matching stream to update index");
5194 assert(0);
5195 }
5196
5197 pendingBufferIter->need_metadata = needMetadata;
5198 if (needMetadata)
5199 streams_need_metadata += 1;
5200 LOGD("calling YUV channel request, need_metadata is %d",
5201 needMetadata);
5202 } else {
5203 LOGD("request with buffer %p, frame_number %d",
5204 output.buffer, frameNumber);
5205
5206 rc = channel->request(output.buffer, frameNumber, indexUsed);
5207
5208 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5209 uint32_t j = 0;
5210 for (j = 0; j < streamsArray.num_streams; j++) {
5211 if (streamsArray.stream_request[j].streamID == streamId) {
5212 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5213 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5214 else
5215 streamsArray.stream_request[j].buf_index = indexUsed;
5216 break;
5217 }
5218 }
5219 if (j == streamsArray.num_streams) {
5220 LOGE("Did not find matching stream to update index");
5221 assert(0);
5222 }
5223
5224 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5225 && mBatchSize) {
5226 mToBeQueuedVidBufs++;
5227 if (mToBeQueuedVidBufs == mBatchSize) {
5228 channel->queueBatchBuf();
5229 }
5230 }
5231 if (rc < 0) {
5232 LOGE("request failed");
5233 pthread_mutex_unlock(&mMutex);
5234 return rc;
5235 }
5236 }
5237 pendingBufferIter++;
5238 }
5239
5240 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5241 itr++) {
5242 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5243
5244 if (channel == NULL) {
5245 LOGE("invalid channel pointer for stream");
5246 assert(0);
5247 return BAD_VALUE;
5248 }
5249
5250 InternalRequest requestedStream;
5251 requestedStream = (*itr);
5252
5253
5254 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5255 LOGD("snapshot request internally input buffer %p, frame_number %d",
5256 request->input_buffer, frameNumber);
5257 if(request->input_buffer != NULL){
5258 rc = channel->request(NULL, frameNumber,
5259 pInputBuffer, &mReprocMeta, indexUsed, true,
5260 requestedStream.meteringOnly);
5261 if (rc < 0) {
5262 LOGE("Fail to request on picture channel");
5263 pthread_mutex_unlock(&mMutex);
5264 return rc;
5265 }
5266 } else {
5267 LOGD("snapshot request with frame_number %d", frameNumber);
5268 if (!request->settings) {
5269 rc = channel->request(NULL, frameNumber,
5270 NULL, mPrevParameters, indexUsed, true,
5271 requestedStream.meteringOnly);
5272 } else {
5273 rc = channel->request(NULL, frameNumber,
5274 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5275 }
5276 if (rc < 0) {
5277 LOGE("Fail to request on picture channel");
5278 pthread_mutex_unlock(&mMutex);
5279 return rc;
5280 }
5281
5282 if ((*itr).meteringOnly != 1) {
5283 requestedStream.need_metadata = 1;
5284 streams_need_metadata++;
5285 }
5286 }
5287
5288 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5289 uint32_t j = 0;
5290 for (j = 0; j < streamsArray.num_streams; j++) {
5291 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005292 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5293 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5294 else
5295 streamsArray.stream_request[j].buf_index = indexUsed;
5296 break;
5297 }
5298 }
5299 if (j == streamsArray.num_streams) {
5300 LOGE("Did not find matching stream to update index");
5301 assert(0);
5302 }
5303
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005304 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005305 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005306 assert(0);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005307 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005308 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005309 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005310 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005311
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005312 //If 2 streams have need_metadata set to true, fail the request, unless
5313 //we copy/reference count the metadata buffer
5314 if (streams_need_metadata > 1) {
5315 LOGE("not supporting request in which two streams requires"
5316 " 2 HAL metadata for reprocessing");
5317 pthread_mutex_unlock(&mMutex);
5318 return -EINVAL;
5319 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005320
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005321 if (request->input_buffer == NULL) {
5322 /* Set the parameters to backend:
5323 * - For every request in NORMAL MODE
5324 * - For every request in HFR mode during preview only case
5325 * - Once every batch in HFR mode during video recording
5326 */
5327 if (!mBatchSize ||
5328 (mBatchSize && !isVidBufRequested) ||
5329 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5330 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5331 mBatchSize, isVidBufRequested,
5332 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005333
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005334 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5335 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5336 uint32_t m = 0;
5337 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5338 if (streamsArray.stream_request[k].streamID ==
5339 mBatchedStreamsArray.stream_request[m].streamID)
5340 break;
5341 }
5342 if (m == mBatchedStreamsArray.num_streams) {
5343 mBatchedStreamsArray.stream_request\
5344 [mBatchedStreamsArray.num_streams].streamID =
5345 streamsArray.stream_request[k].streamID;
5346 mBatchedStreamsArray.stream_request\
5347 [mBatchedStreamsArray.num_streams].buf_index =
5348 streamsArray.stream_request[k].buf_index;
5349 mBatchedStreamsArray.num_streams =
5350 mBatchedStreamsArray.num_streams + 1;
5351 }
5352 }
5353 streamsArray = mBatchedStreamsArray;
5354 }
5355 /* Update stream id of all the requested buffers */
5356 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5357 streamsArray)) {
5358 LOGE("Failed to set stream type mask in the parameters");
5359 return BAD_VALUE;
5360 }
5361
5362 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5363 mParameters);
5364 if (rc < 0) {
5365 LOGE("set_parms failed");
5366 }
5367 /* reset to zero coz, the batch is queued */
5368 mToBeQueuedVidBufs = 0;
5369 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5370 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5371 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005372 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5373 uint32_t m = 0;
5374 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5375 if (streamsArray.stream_request[k].streamID ==
5376 mBatchedStreamsArray.stream_request[m].streamID)
5377 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005378 }
5379 if (m == mBatchedStreamsArray.num_streams) {
5380 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5381 streamID = streamsArray.stream_request[k].streamID;
5382 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5383 buf_index = streamsArray.stream_request[k].buf_index;
5384 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5385 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005386 }
5387 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005388 mPendingLiveRequest++;
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005390 }
5391
5392 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
5393
5394 mState = STARTED;
5395 // Added a timed condition wait
5396 struct timespec ts;
5397 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005398 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07005399 if (rc < 0) {
5400 isValidTimeout = 0;
5401 LOGE("Error reading the real time clock!!");
5402 }
5403 else {
5404 // Make timeout as 5 sec for request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005405 int64_t timeout = 5;
5406 {
5407 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5408 // If there is a pending HDR+ request, the following requests may be blocked until the
5409 // HDR+ request is done. So allow a longer timeout.
5410 if (mHdrPlusPendingRequests.size() > 0) {
5411 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
5412 }
5413 }
5414 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07005415 }
5416 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005417 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07005418 (mState != ERROR) && (mState != DEINIT)) {
5419 if (!isValidTimeout) {
5420 LOGD("Blocking on conditional wait");
5421 pthread_cond_wait(&mRequestCond, &mMutex);
5422 }
5423 else {
5424 LOGD("Blocking on timed conditional wait");
5425 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
5426 if (rc == ETIMEDOUT) {
5427 rc = -ENODEV;
5428 LOGE("Unblocked on timeout!!!!");
5429 break;
5430 }
5431 }
5432 LOGD("Unblocked");
5433 if (mWokenUpByDaemon) {
5434 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005435 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07005436 break;
5437 }
5438 }
5439 pthread_mutex_unlock(&mMutex);
5440
5441 return rc;
5442}
5443
/*===========================================================================
 * FUNCTION   : dump
 *
 * DESCRIPTION: Writes a human-readable snapshot of HAL3 state (pending
 *              capture requests, pending buffers, and the pending frame
 *              drop list) to the given file descriptor, and flags a debug
 *              level update for the next pass.
 *
 * PARAMETERS :
 *   @fd : file descriptor to write the dump output to
 *
 * RETURN     : None
 *==========================================================================*/
5454void QCamera3HardwareInterface::dump(int fd)
5455{
5456 pthread_mutex_lock(&mMutex);
5457 dprintf(fd, "\n Camera HAL3 information Begin \n");
5458
5459 dprintf(fd, "\nNumber of pending requests: %zu \n",
5460 mPendingRequestsList.size());
5461 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5462 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
5463 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
5464 for(pendingRequestIterator i = mPendingRequestsList.begin();
5465 i != mPendingRequestsList.end(); i++) {
5466 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
5467 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
5468 i->input_buffer);
5469 }
5470 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
5471 mPendingBuffersMap.get_num_overall_buffers());
5472 dprintf(fd, "-------+------------------\n");
5473 dprintf(fd, " Frame | Stream type mask \n");
5474 dprintf(fd, "-------+------------------\n");
5475 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
5476 for(auto &j : req.mPendingBufferList) {
5477 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
5478 dprintf(fd, " %5d | %11d \n",
5479 req.frame_number, channel->getStreamTypeMask());
5480 }
5481 }
5482 dprintf(fd, "-------+------------------\n");
5483
5484 dprintf(fd, "\nPending frame drop list: %zu\n",
5485 mPendingFrameDropList.size());
5486 dprintf(fd, "-------+-----------\n");
5487 dprintf(fd, " Frame | Stream ID \n");
5488 dprintf(fd, "-------+-----------\n");
5489 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
5490 i != mPendingFrameDropList.end(); i++) {
5491 dprintf(fd, " %5d | %9d \n",
5492 i->frame_number, i->stream_ID);
5493 }
5494 dprintf(fd, "-------+-----------\n");
5495
5496 dprintf(fd, "\n Camera HAL3 information End \n");
5497
5498 /* use dumpsys media.camera as trigger to send update debug level event */
5499 mUpdateDebugLevel = true;
5500 pthread_mutex_unlock(&mMutex);
5501 return;
5502}
5503
5504/*===========================================================================
5505 * FUNCTION : flush
5506 *
5507 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
5508 * conditionally restarts channels
5509 *
5510 * PARAMETERS :
5511 * @ restartChannels: re-start all channels
5512 *
5513 *
5514 * RETURN :
5515 * 0 on success
5516 * Error code on failure
5517 *==========================================================================*/
5518int QCamera3HardwareInterface::flush(bool restartChannels)
5519{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005520 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005521 int32_t rc = NO_ERROR;
5522
5523 LOGD("Unblocking Process Capture Request");
5524 pthread_mutex_lock(&mMutex);
5525 mFlush = true;
5526 pthread_mutex_unlock(&mMutex);
5527
5528 rc = stopAllChannels();
5529 // unlink of dualcam
5530 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005531 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5532 &m_pDualCamCmdPtr->bundle_info;
5533 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005534 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5535 pthread_mutex_lock(&gCamLock);
5536
5537 if (mIsMainCamera == 1) {
5538 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5539 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005540 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005541 // related session id should be session id of linked session
5542 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5543 } else {
5544 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5545 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005546 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07005547 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5548 }
5549 pthread_mutex_unlock(&gCamLock);
5550
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005551 rc = mCameraHandle->ops->set_dual_cam_cmd(
5552 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005553 if (rc < 0) {
5554 LOGE("Dualcam: Unlink failed, but still proceed to close");
5555 }
5556 }
5557
5558 if (rc < 0) {
5559 LOGE("stopAllChannels failed");
5560 return rc;
5561 }
5562 if (mChannelHandle) {
5563 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
5564 mChannelHandle);
5565 }
5566
5567 // Reset bundle info
5568 rc = setBundleInfo();
5569 if (rc < 0) {
5570 LOGE("setBundleInfo failed %d", rc);
5571 return rc;
5572 }
5573
5574 // Mutex Lock
5575 pthread_mutex_lock(&mMutex);
5576
5577 // Unblock process_capture_request
5578 mPendingLiveRequest = 0;
5579 pthread_cond_signal(&mRequestCond);
5580
5581 rc = notifyErrorForPendingRequests();
5582 if (rc < 0) {
5583 LOGE("notifyErrorForPendingRequests failed");
5584 pthread_mutex_unlock(&mMutex);
5585 return rc;
5586 }
5587
5588 mFlush = false;
5589
5590 // Start the Streams/Channels
5591 if (restartChannels) {
5592 rc = startAllChannels();
5593 if (rc < 0) {
5594 LOGE("startAllChannels failed");
5595 pthread_mutex_unlock(&mMutex);
5596 return rc;
5597 }
5598 }
5599
5600 if (mChannelHandle) {
5601 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
5602 mChannelHandle);
5603 if (rc < 0) {
5604 LOGE("start_channel failed");
5605 pthread_mutex_unlock(&mMutex);
5606 return rc;
5607 }
5608 }
5609
5610 pthread_mutex_unlock(&mMutex);
5611
5612 return 0;
5613}
5614
5615/*===========================================================================
5616 * FUNCTION : flushPerf
5617 *
5618 * DESCRIPTION: This is the performance optimization version of flush that does
5619 * not use stream off, rather flushes the system
5620 *
5621 * PARAMETERS :
5622 *
5623 *
5624 * RETURN : 0 : success
5625 * -EINVAL: input is malformed (device is not valid)
5626 * -ENODEV: if the device has encountered a serious error
5627 *==========================================================================*/
5628int QCamera3HardwareInterface::flushPerf()
5629{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08005630 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005631 int32_t rc = 0;
5632 struct timespec timeout;
5633 bool timed_wait = false;
5634
5635 pthread_mutex_lock(&mMutex);
5636 mFlushPerf = true;
5637 mPendingBuffersMap.numPendingBufsAtFlush =
5638 mPendingBuffersMap.get_num_overall_buffers();
5639 LOGD("Calling flush. Wait for %d buffers to return",
5640 mPendingBuffersMap.numPendingBufsAtFlush);
5641
5642 /* send the flush event to the backend */
5643 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
5644 if (rc < 0) {
5645 LOGE("Error in flush: IOCTL failure");
5646 mFlushPerf = false;
5647 pthread_mutex_unlock(&mMutex);
5648 return -ENODEV;
5649 }
5650
5651 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
5652 LOGD("No pending buffers in HAL, return flush");
5653 mFlushPerf = false;
5654 pthread_mutex_unlock(&mMutex);
5655 return rc;
5656 }
5657
5658 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08005659 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07005660 if (rc < 0) {
5661 LOGE("Error reading the real time clock, cannot use timed wait");
5662 } else {
5663 timeout.tv_sec += FLUSH_TIMEOUT;
5664 timed_wait = true;
5665 }
5666
5667 //Block on conditional variable
5668 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
5669 LOGD("Waiting on mBuffersCond");
5670 if (!timed_wait) {
5671 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
5672 if (rc != 0) {
5673 LOGE("pthread_cond_wait failed due to rc = %s",
5674 strerror(rc));
5675 break;
5676 }
5677 } else {
5678 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
5679 if (rc != 0) {
5680 LOGE("pthread_cond_timedwait failed due to rc = %s",
5681 strerror(rc));
5682 break;
5683 }
5684 }
5685 }
5686 if (rc != 0) {
5687 mFlushPerf = false;
5688 pthread_mutex_unlock(&mMutex);
5689 return -ENODEV;
5690 }
5691
5692 LOGD("Received buffers, now safe to return them");
5693
5694 //make sure the channels handle flush
5695 //currently only required for the picture channel to release snapshot resources
5696 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5697 it != mStreamInfo.end(); it++) {
5698 QCamera3Channel *channel = (*it)->channel;
5699 if (channel) {
5700 rc = channel->flush();
5701 if (rc) {
5702 LOGE("Flushing the channels failed with error %d", rc);
5703 // even though the channel flush failed we need to continue and
5704 // return the buffers we have to the framework, however the return
5705 // value will be an error
5706 rc = -ENODEV;
5707 }
5708 }
5709 }
5710
5711 /* notify the frameworks and send errored results */
5712 rc = notifyErrorForPendingRequests();
5713 if (rc < 0) {
5714 LOGE("notifyErrorForPendingRequests failed");
5715 pthread_mutex_unlock(&mMutex);
5716 return rc;
5717 }
5718
5719 //unblock process_capture_request
5720 mPendingLiveRequest = 0;
5721 unblockRequestIfNecessary();
5722
5723 mFlushPerf = false;
5724 pthread_mutex_unlock(&mMutex);
5725 LOGD ("Flush Operation complete. rc = %d", rc);
5726 return rc;
5727}
5728
5729/*===========================================================================
5730 * FUNCTION : handleCameraDeviceError
5731 *
5732 * DESCRIPTION: This function calls internal flush and notifies the error to
5733 * framework and updates the state variable.
5734 *
5735 * PARAMETERS : None
5736 *
5737 * RETURN : NO_ERROR on Success
5738 * Error code on failure
5739 *==========================================================================*/
5740int32_t QCamera3HardwareInterface::handleCameraDeviceError()
5741{
5742 int32_t rc = NO_ERROR;
5743
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005744 {
5745 Mutex::Autolock lock(mFlushLock);
5746 pthread_mutex_lock(&mMutex);
5747 if (mState != ERROR) {
5748 //if mState != ERROR, nothing to be done
5749 pthread_mutex_unlock(&mMutex);
5750 return NO_ERROR;
5751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005752 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005753
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005754 rc = flush(false /* restart channels */);
5755 if (NO_ERROR != rc) {
5756 LOGE("internal flush to handle mState = ERROR failed");
5757 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005758
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005759 pthread_mutex_lock(&mMutex);
5760 mState = DEINIT;
5761 pthread_mutex_unlock(&mMutex);
5762 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005763
5764 camera3_notify_msg_t notify_msg;
5765 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
5766 notify_msg.type = CAMERA3_MSG_ERROR;
5767 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
5768 notify_msg.message.error.error_stream = NULL;
5769 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005770 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07005771
5772 return rc;
5773}
5774
5775/*===========================================================================
5776 * FUNCTION : captureResultCb
5777 *
5778 * DESCRIPTION: Callback handler for all capture result
5779 * (streams, as well as metadata)
5780 *
5781 * PARAMETERS :
5782 * @metadata : metadata information
5783 * @buffer : actual gralloc buffer to be returned to frameworks.
5784 * NULL if metadata.
5785 *
5786 * RETURN : NONE
5787 *==========================================================================*/
5788void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
5789 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
5790{
5791 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005792 pthread_mutex_lock(&mMutex);
5793 uint8_t batchSize = mBatchSize;
5794 pthread_mutex_unlock(&mMutex);
5795 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005796 handleBatchMetadata(metadata_buf,
5797 true /* free_and_bufdone_meta_buf */);
5798 } else { /* mBatchSize = 0 */
5799 hdrPlusPerfLock(metadata_buf);
5800 pthread_mutex_lock(&mMutex);
5801 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005802 true /* free_and_bufdone_meta_buf */,
5803 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07005804 pthread_mutex_unlock(&mMutex);
5805 }
5806 } else if (isInputBuffer) {
5807 pthread_mutex_lock(&mMutex);
5808 handleInputBufferWithLock(frame_number);
5809 pthread_mutex_unlock(&mMutex);
5810 } else {
5811 pthread_mutex_lock(&mMutex);
5812 handleBufferWithLock(buffer, frame_number);
5813 pthread_mutex_unlock(&mMutex);
5814 }
5815 return;
5816}
5817
5818/*===========================================================================
5819 * FUNCTION : getReprocessibleOutputStreamId
5820 *
5821 * DESCRIPTION: Get source output stream id for the input reprocess stream
5822 * based on size and format, which would be the largest
5823 * output stream if an input stream exists.
5824 *
5825 * PARAMETERS :
5826 * @id : return the stream id if found
5827 *
5828 * RETURN : int32_t type of status
5829 * NO_ERROR -- success
5830 * none-zero failure code
5831 *==========================================================================*/
5832int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
5833{
5834 /* check if any output or bidirectional stream with the same size and format
5835 and return that stream */
5836 if ((mInputStreamInfo.dim.width > 0) &&
5837 (mInputStreamInfo.dim.height > 0)) {
5838 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5839 it != mStreamInfo.end(); it++) {
5840
5841 camera3_stream_t *stream = (*it)->stream;
5842 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
5843 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
5844 (stream->format == mInputStreamInfo.format)) {
5845 // Usage flag for an input stream and the source output stream
5846 // may be different.
5847 LOGD("Found reprocessible output stream! %p", *it);
5848 LOGD("input stream usage 0x%x, current stream usage 0x%x",
5849 stream->usage, mInputStreamInfo.usage);
5850
5851 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
5852 if (channel != NULL && channel->mStreams[0]) {
5853 id = channel->mStreams[0]->getMyServerID();
5854 return NO_ERROR;
5855 }
5856 }
5857 }
5858 } else {
5859 LOGD("No input stream, so no reprocessible output stream");
5860 }
5861 return NAME_NOT_FOUND;
5862}
5863
5864/*===========================================================================
5865 * FUNCTION : lookupFwkName
5866 *
5867 * DESCRIPTION: In case the enum is not same in fwk and backend
5868 * make sure the parameter is correctly propogated
5869 *
5870 * PARAMETERS :
5871 * @arr : map between the two enums
5872 * @len : len of the map
5873 * @hal_name : name of the hal_parm to map
5874 *
5875 * RETURN : int type of status
5876 * fwk_name -- success
5877 * none-zero failure code
5878 *==========================================================================*/
5879template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
5880 size_t len, halType hal_name)
5881{
5882
5883 for (size_t i = 0; i < len; i++) {
5884 if (arr[i].hal_name == hal_name) {
5885 return arr[i].fwk_name;
5886 }
5887 }
5888
5889 /* Not able to find matching framework type is not necessarily
5890 * an error case. This happens when mm-camera supports more attributes
5891 * than the frameworks do */
5892 LOGH("Cannot find matching framework type");
5893 return NAME_NOT_FOUND;
5894}
5895
5896/*===========================================================================
5897 * FUNCTION : lookupHalName
5898 *
5899 * DESCRIPTION: In case the enum is not same in fwk and backend
5900 * make sure the parameter is correctly propogated
5901 *
5902 * PARAMETERS :
5903 * @arr : map between the two enums
5904 * @len : len of the map
5905 * @fwk_name : name of the hal_parm to map
5906 *
5907 * RETURN : int32_t type of status
5908 * hal_name -- success
5909 * none-zero failure code
5910 *==========================================================================*/
5911template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
5912 size_t len, fwkType fwk_name)
5913{
5914 for (size_t i = 0; i < len; i++) {
5915 if (arr[i].fwk_name == fwk_name) {
5916 return arr[i].hal_name;
5917 }
5918 }
5919
5920 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
5921 return NAME_NOT_FOUND;
5922}
5923
5924/*===========================================================================
5925 * FUNCTION : lookupProp
5926 *
5927 * DESCRIPTION: lookup a value by its name
5928 *
5929 * PARAMETERS :
5930 * @arr : map between the two enums
5931 * @len : size of the map
5932 * @name : name to be looked up
5933 *
5934 * RETURN : Value if found
5935 * CAM_CDS_MODE_MAX if not found
5936 *==========================================================================*/
5937template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
5938 size_t len, const char *name)
5939{
5940 if (name) {
5941 for (size_t i = 0; i < len; i++) {
5942 if (!strcmp(arr[i].desc, name)) {
5943 return arr[i].val;
5944 }
5945 }
5946 }
5947 return CAM_CDS_MODE_MAX;
5948}
5949
5950/*===========================================================================
5951 *
5952 * DESCRIPTION:
5953 *
5954 * PARAMETERS :
5955 * @metadata : metadata information from callback
5956 * @timestamp: metadata buffer timestamp
5957 * @request_id: request id
5958 * @jpegMetadata: additional jpeg metadata
Samuel Ha68ba5172016-12-15 18:41:12 -08005959 * @DevCamDebug_meta_enable: enable DevCamDebug meta
5960 * // DevCamDebug metadata end
Thierry Strudel3d639192016-09-09 11:52:26 -07005961 * @pprocDone: whether internal offline postprocsesing is done
5962 *
5963 * RETURN : camera_metadata_t*
5964 * metadata in a format specified by fwk
5965 *==========================================================================*/
5966camera_metadata_t*
5967QCamera3HardwareInterface::translateFromHalMetadata(
5968 metadata_buffer_t *metadata,
5969 nsecs_t timestamp,
5970 int32_t request_id,
5971 const CameraMetadata& jpegMetadata,
5972 uint8_t pipeline_depth,
5973 uint8_t capture_intent,
Samuel Ha68ba5172016-12-15 18:41:12 -08005974 /* DevCamDebug metadata translateFromHalMetadata argument */
5975 uint8_t DevCamDebug_meta_enable,
5976 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005977 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005978 uint8_t fwk_cacMode,
5979 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07005980{
5981 CameraMetadata camMetadata;
5982 camera_metadata_t *resultMetadata;
5983
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005984 if (mBatchSize && !firstMetadataInBatch) {
5985 /* In batch mode, use cached metadata from the first metadata
5986 in the batch */
5987 camMetadata.clear();
5988 camMetadata = mCachedMetadata;
5989 }
5990
Thierry Strudel3d639192016-09-09 11:52:26 -07005991 if (jpegMetadata.entryCount())
5992 camMetadata.append(jpegMetadata);
5993
5994 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
5995 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
5996 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
5997 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08005998 if (mBatchSize == 0) {
5999 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
6000 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &DevCamDebug_meta_enable, 1);
6001 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006002
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006003 if (mBatchSize && !firstMetadataInBatch) {
6004 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
6005 resultMetadata = camMetadata.release();
6006 return resultMetadata;
6007 }
6008
Samuel Ha68ba5172016-12-15 18:41:12 -08006009 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6010 // Only update DevCameraDebug metadta conditionally: non-HFR mode and it is enabled.
6011 if (mBatchSize == 0 && DevCamDebug_meta_enable != 0) {
6012 // DevCamDebug metadata translateFromHalMetadata AF
6013 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6014 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6015 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6016 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6017 }
6018 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6019 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6020 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6021 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6022 }
6023 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6024 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6025 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6026 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6027 }
6028 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6029 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6030 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6031 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6032 }
6033 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6034 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6035 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6036 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6037 }
6038 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6039 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6040 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6041 *DevCamDebug_af_monitor_pdaf_target_pos;
6042 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6043 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6044 }
6045 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6046 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6047 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6048 *DevCamDebug_af_monitor_pdaf_confidence;
6049 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6050 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6051 }
6052 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6053 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6054 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6055 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6056 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6057 }
6058 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6059 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6060 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6061 *DevCamDebug_af_monitor_tof_target_pos;
6062 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6063 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6064 }
6065 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6066 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6067 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6068 *DevCamDebug_af_monitor_tof_confidence;
6069 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6070 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6071 }
6072 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6073 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6074 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6075 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6076 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6077 }
6078 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6079 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6080 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6081 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6082 &fwk_DevCamDebug_af_monitor_type_select, 1);
6083 }
6084 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6085 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6086 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6087 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6088 &fwk_DevCamDebug_af_monitor_refocus, 1);
6089 }
6090 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6091 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6092 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6093 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6094 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6095 }
6096 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6097 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6098 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6099 *DevCamDebug_af_search_pdaf_target_pos;
6100 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6101 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6102 }
6103 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6104 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6105 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6106 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6107 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6108 }
6109 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6110 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6111 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6112 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6113 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6114 }
6115 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6116 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6117 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6118 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6119 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6120 }
6121 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6122 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6123 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6124 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6125 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6126 }
6127 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6128 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6129 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6130 *DevCamDebug_af_search_tof_target_pos;
6131 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6132 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6133 }
6134 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6135 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6136 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6137 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6138 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6139 }
6140 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6141 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6142 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6143 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6144 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6145 }
6146 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6147 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6148 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6149 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6150 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6151 }
6152 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6153 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6154 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6155 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6156 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6157 }
6158 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6159 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6160 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6161 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6162 &fwk_DevCamDebug_af_search_type_select, 1);
6163 }
6164 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6165 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6166 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6167 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6168 &fwk_DevCamDebug_af_search_next_pos, 1);
6169 }
6170 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6171 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6172 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6173 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6174 &fwk_DevCamDebug_af_search_target_pos, 1);
6175 }
6176 // DevCamDebug metadata translateFromHalMetadata AEC
6177 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6178 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6179 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6180 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6181 }
6182 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6183 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6184 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6185 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6186 }
6187 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6188 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6189 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6190 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6191 }
6192 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6193 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6194 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6195 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6196 }
6197 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6198 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6199 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6200 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6201 }
6202 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6203 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6204 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6205 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6206 }
6207 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6208 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6209 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6210 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6211 }
6212 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6213 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6214 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6215 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6216 }
6217 // DevCamDebug metadata translateFromHalMetadata AWB
6218 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
6219 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
6220 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
6221 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
6222 }
6223 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
6224 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
6225 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
6226 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
6227 }
6228 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
6229 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
6230 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
6231 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
6232 }
6233 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
6234 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
6235 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
6236 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
6237 }
6238 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
6239 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
6240 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
6241 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
6242 }
6243 }
6244 // atrace_end(ATRACE_TAG_ALWAYS);
6245
Thierry Strudel3d639192016-09-09 11:52:26 -07006246 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
6247 int64_t fwk_frame_number = *frame_number;
6248 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
6249 }
6250
6251 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
6252 int32_t fps_range[2];
6253 fps_range[0] = (int32_t)float_range->min_fps;
6254 fps_range[1] = (int32_t)float_range->max_fps;
6255 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
6256 fps_range, 2);
6257 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
6258 fps_range[0], fps_range[1]);
6259 }
6260
6261 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
6262 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
6263 }
6264
6265 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6266 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
6267 METADATA_MAP_SIZE(SCENE_MODES_MAP),
6268 *sceneMode);
6269 if (NAME_NOT_FOUND != val) {
6270 uint8_t fwkSceneMode = (uint8_t)val;
6271 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
6272 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
6273 fwkSceneMode);
6274 }
6275 }
6276
6277 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
6278 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
6279 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
6280 }
6281
6282 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
6283 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
6284 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
6285 }
6286
6287 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
6288 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
6289 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
6290 }
6291
6292 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
6293 CAM_INTF_META_EDGE_MODE, metadata) {
6294 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
6295 }
6296
6297 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
6298 uint8_t fwk_flashPower = (uint8_t) *flashPower;
6299 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
6300 }
6301
6302 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
6303 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
6304 }
6305
6306 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
6307 if (0 <= *flashState) {
6308 uint8_t fwk_flashState = (uint8_t) *flashState;
6309 if (!gCamCapability[mCameraId]->flash_available) {
6310 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
6311 }
6312 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
6313 }
6314 }
6315
6316 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
6317 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
6318 if (NAME_NOT_FOUND != val) {
6319 uint8_t fwk_flashMode = (uint8_t)val;
6320 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
6321 }
6322 }
6323
6324 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
6325 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
6326 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
6327 }
6328
6329 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
6330 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
6331 }
6332
6333 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
6334 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
6335 }
6336
6337 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
6338 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
6339 }
6340
6341 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
6342 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
6343 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
6344 }
6345
6346 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
6347 uint8_t fwk_videoStab = (uint8_t) *videoStab;
6348 LOGD("fwk_videoStab = %d", fwk_videoStab);
6349 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
6350 } else {
6351 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
6352 // and so hardcoding the Video Stab result to OFF mode.
6353 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
6354 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006355 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07006356 }
6357
6358 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
6359 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
6360 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
6361 }
6362
6363 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
6364 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
6365 }
6366
Thierry Strudel3d639192016-09-09 11:52:26 -07006367 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
6368 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006369 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07006370
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006371 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
6372 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07006373
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006374 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07006375 blackLevelAppliedPattern->cam_black_level[0],
6376 blackLevelAppliedPattern->cam_black_level[1],
6377 blackLevelAppliedPattern->cam_black_level[2],
6378 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006379 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
6380 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006381
6382#ifndef USE_HAL_3_3
6383 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Zhijun Heb753c672016-06-15 14:50:48 -07006384 // Need convert the internal 12 bit depth to sensor 10 bit sensor raw
6385 // depth space.
6386 fwk_blackLevelInd[0] /= 4.0;
6387 fwk_blackLevelInd[1] /= 4.0;
6388 fwk_blackLevelInd[2] /= 4.0;
6389 fwk_blackLevelInd[3] /= 4.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07006390 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
6391 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006392#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006393 }
6394
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006395#ifndef USE_HAL_3_3
6396 // Fixed whitelevel is used by ISP/Sensor
6397 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
6398 &gCamCapability[mCameraId]->white_level, 1);
6399#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006400
6401 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
6402 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
6403 int32_t scalerCropRegion[4];
6404 scalerCropRegion[0] = hScalerCropRegion->left;
6405 scalerCropRegion[1] = hScalerCropRegion->top;
6406 scalerCropRegion[2] = hScalerCropRegion->width;
6407 scalerCropRegion[3] = hScalerCropRegion->height;
6408
6409 // Adjust crop region from sensor output coordinate system to active
6410 // array coordinate system.
6411 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
6412 scalerCropRegion[2], scalerCropRegion[3]);
6413
6414 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
6415 }
6416
6417 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
6418 LOGD("sensorExpTime = %lld", *sensorExpTime);
6419 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
6420 }
6421
6422 IF_META_AVAILABLE(int64_t, sensorFameDuration,
6423 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
6424 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
6425 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
6426 }
6427
6428 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
6429 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
6430 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
6431 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
6432 sensorRollingShutterSkew, 1);
6433 }
6434
6435 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
6436 LOGD("sensorSensitivity = %d", *sensorSensitivity);
6437 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
6438
6439 //calculate the noise profile based on sensitivity
6440 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
6441 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
6442 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
6443 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
6444 noise_profile[i] = noise_profile_S;
6445 noise_profile[i+1] = noise_profile_O;
6446 }
6447 LOGD("noise model entry (S, O) is (%f, %f)",
6448 noise_profile_S, noise_profile_O);
6449 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
6450 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
6451 }
6452
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006453#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006454 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006455 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006456 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006457 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07006458 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
6459 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
6460 }
6461 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006462#endif
6463
Thierry Strudel3d639192016-09-09 11:52:26 -07006464 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
6465 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
6466 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
6467 }
6468
6469 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
6470 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
6471 *faceDetectMode);
6472 if (NAME_NOT_FOUND != val) {
6473 uint8_t fwk_faceDetectMode = (uint8_t)val;
6474 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
6475
6476 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
6477 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
6478 CAM_INTF_META_FACE_DETECTION, metadata) {
6479 uint8_t numFaces = MIN(
6480 faceDetectionInfo->num_faces_detected, MAX_ROI);
6481 int32_t faceIds[MAX_ROI];
6482 uint8_t faceScores[MAX_ROI];
6483 int32_t faceRectangles[MAX_ROI * 4];
6484 int32_t faceLandmarks[MAX_ROI * 6];
6485 size_t j = 0, k = 0;
6486
6487 for (size_t i = 0; i < numFaces; i++) {
6488 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
6489 // Adjust crop region from sensor output coordinate system to active
6490 // array coordinate system.
6491 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
6492 mCropRegionMapper.toActiveArray(rect.left, rect.top,
6493 rect.width, rect.height);
6494
6495 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
6496 faceRectangles+j, -1);
6497
6498 j+= 4;
6499 }
6500 if (numFaces <= 0) {
6501 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
6502 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
6503 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
6504 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
6505 }
6506
6507 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
6508 numFaces);
6509 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
6510 faceRectangles, numFaces * 4U);
6511 if (fwk_faceDetectMode ==
6512 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
6513 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
6514 CAM_INTF_META_FACE_LANDMARK, metadata) {
6515
6516 for (size_t i = 0; i < numFaces; i++) {
6517 // Map the co-ordinate sensor output coordinate system to active
6518 // array coordinate system.
6519 mCropRegionMapper.toActiveArray(
6520 landmarks->face_landmarks[i].left_eye_center.x,
6521 landmarks->face_landmarks[i].left_eye_center.y);
6522 mCropRegionMapper.toActiveArray(
6523 landmarks->face_landmarks[i].right_eye_center.x,
6524 landmarks->face_landmarks[i].right_eye_center.y);
6525 mCropRegionMapper.toActiveArray(
6526 landmarks->face_landmarks[i].mouth_center.x,
6527 landmarks->face_landmarks[i].mouth_center.y);
6528
6529 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006530 k+= TOTAL_LANDMARK_INDICES;
6531 }
6532 } else {
6533 for (size_t i = 0; i < numFaces; i++) {
6534 setInvalidLandmarks(faceLandmarks+k);
6535 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07006536 }
6537 }
6538
6539 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
6540 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
6541 faceLandmarks, numFaces * 6U);
6542 }
6543 }
6544 }
6545 }
6546 }
6547
6548 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
6549 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
6550 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006551
6552 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
6553 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
6554 // process histogram statistics info
6555 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
6556 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
6557 cam_histogram_data_t rHistData, gHistData, bHistData;
6558 memset(&rHistData, 0, sizeof(rHistData));
6559 memset(&gHistData, 0, sizeof(gHistData));
6560 memset(&bHistData, 0, sizeof(bHistData));
6561
6562 switch (stats_data->type) {
6563 case CAM_HISTOGRAM_TYPE_BAYER:
6564 switch (stats_data->bayer_stats.data_type) {
6565 case CAM_STATS_CHANNEL_GR:
6566 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
6567 break;
6568 case CAM_STATS_CHANNEL_GB:
6569 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
6570 break;
6571 case CAM_STATS_CHANNEL_B:
6572 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
6573 break;
6574 case CAM_STATS_CHANNEL_ALL:
6575 rHistData = stats_data->bayer_stats.r_stats;
6576 //Framework expects only 3 channels. So, for now,
6577 //use gb stats for G channel.
6578 gHistData = stats_data->bayer_stats.gb_stats;
6579 bHistData = stats_data->bayer_stats.b_stats;
6580 break;
6581 case CAM_STATS_CHANNEL_Y:
6582 case CAM_STATS_CHANNEL_R:
6583 default:
6584 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
6585 break;
6586 }
6587 break;
6588 case CAM_HISTOGRAM_TYPE_YUV:
6589 rHistData = gHistData = bHistData = stats_data->yuv_stats;
6590 break;
6591 }
6592
6593 memcpy(hist_buf, rHistData.hist_buf, hist_size);
6594 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
6595 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
6596
6597 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
6598 }
6599 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006600 }
6601
6602 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
6603 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
6604 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
6605 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
6606 }
6607
6608 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
6609 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
6610 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
6611 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
6612 }
6613
6614 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
6615 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
6616 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
6617 CAM_MAX_SHADING_MAP_HEIGHT);
6618 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
6619 CAM_MAX_SHADING_MAP_WIDTH);
6620 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
6621 lensShadingMap->lens_shading, 4U * map_width * map_height);
6622 }
6623
6624 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
6625 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
6626 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
6627 }
6628
6629 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
6630 //Populate CAM_INTF_META_TONEMAP_CURVES
6631 /* ch0 = G, ch 1 = B, ch 2 = R*/
6632 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6633 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6634 tonemap->tonemap_points_cnt,
6635 CAM_MAX_TONEMAP_CURVE_SIZE);
6636 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6637 }
6638
6639 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
6640 &tonemap->curves[0].tonemap_points[0][0],
6641 tonemap->tonemap_points_cnt * 2);
6642
6643 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
6644 &tonemap->curves[1].tonemap_points[0][0],
6645 tonemap->tonemap_points_cnt * 2);
6646
6647 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
6648 &tonemap->curves[2].tonemap_points[0][0],
6649 tonemap->tonemap_points_cnt * 2);
6650 }
6651
6652 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
6653 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
6654 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
6655 CC_GAIN_MAX);
6656 }
6657
6658 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
6659 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
6660 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
6661 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
6662 CC_MATRIX_COLS * CC_MATRIX_ROWS);
6663 }
6664
6665 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
6666 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
6667 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
6668 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
6669 toneCurve->tonemap_points_cnt,
6670 CAM_MAX_TONEMAP_CURVE_SIZE);
6671 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
6672 }
6673 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
6674 (float*)toneCurve->curve.tonemap_points,
6675 toneCurve->tonemap_points_cnt * 2);
6676 }
6677
6678 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
6679 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
6680 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
6681 predColorCorrectionGains->gains, 4);
6682 }
6683
6684 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
6685 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
6686 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
6687 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
6688 CC_MATRIX_ROWS * CC_MATRIX_COLS);
6689 }
6690
6691 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
6692 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
6693 }
6694
6695 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
6696 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
6697 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
6698 }
6699
6700 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
6701 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
6702 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
6703 }
6704
6705 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
6706 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
6707 *effectMode);
6708 if (NAME_NOT_FOUND != val) {
6709 uint8_t fwk_effectMode = (uint8_t)val;
6710 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
6711 }
6712 }
6713
6714 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
6715 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
6716 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
6717 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
6718 if (NAME_NOT_FOUND != fwk_testPatternMode) {
6719 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
6720 }
6721 int32_t fwk_testPatternData[4];
6722 fwk_testPatternData[0] = testPatternData->r;
6723 fwk_testPatternData[3] = testPatternData->b;
6724 switch (gCamCapability[mCameraId]->color_arrangement) {
6725 case CAM_FILTER_ARRANGEMENT_RGGB:
6726 case CAM_FILTER_ARRANGEMENT_GRBG:
6727 fwk_testPatternData[1] = testPatternData->gr;
6728 fwk_testPatternData[2] = testPatternData->gb;
6729 break;
6730 case CAM_FILTER_ARRANGEMENT_GBRG:
6731 case CAM_FILTER_ARRANGEMENT_BGGR:
6732 fwk_testPatternData[2] = testPatternData->gr;
6733 fwk_testPatternData[1] = testPatternData->gb;
6734 break;
6735 default:
6736 LOGE("color arrangement %d is not supported",
6737 gCamCapability[mCameraId]->color_arrangement);
6738 break;
6739 }
6740 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
6741 }
6742
6743 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
6744 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
6745 }
6746
6747 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
6748 String8 str((const char *)gps_methods);
6749 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
6750 }
6751
6752 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
6753 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
6754 }
6755
6756 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
6757 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
6758 }
6759
6760 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
6761 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
6762 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
6763 }
6764
6765 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
6766 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
6767 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
6768 }
6769
6770 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
6771 int32_t fwk_thumb_size[2];
6772 fwk_thumb_size[0] = thumb_size->width;
6773 fwk_thumb_size[1] = thumb_size->height;
6774 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
6775 }
6776
6777 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
6778 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
6779 privateData,
6780 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
6781 }
6782
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006783 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
6784 camMetadata.update(QCAMERA3_EXPOSURE_METERING_MODE,
6785 meteringMode, 1);
6786 }
6787
Thierry Strudel3d639192016-09-09 11:52:26 -07006788 if (metadata->is_tuning_params_valid) {
6789 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
6790 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
6791 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
6792
6793
6794 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
6795 sizeof(uint32_t));
6796 data += sizeof(uint32_t);
6797
6798 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
6799 sizeof(uint32_t));
6800 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
6801 data += sizeof(uint32_t);
6802
6803 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
6804 sizeof(uint32_t));
6805 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
6806 data += sizeof(uint32_t);
6807
6808 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
6809 sizeof(uint32_t));
6810 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
6811 data += sizeof(uint32_t);
6812
6813 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
6814 sizeof(uint32_t));
6815 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
6816 data += sizeof(uint32_t);
6817
6818 metadata->tuning_params.tuning_mod3_data_size = 0;
6819 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
6820 sizeof(uint32_t));
6821 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
6822 data += sizeof(uint32_t);
6823
6824 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
6825 TUNING_SENSOR_DATA_MAX);
6826 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
6827 count);
6828 data += count;
6829
6830 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
6831 TUNING_VFE_DATA_MAX);
6832 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
6833 count);
6834 data += count;
6835
6836 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
6837 TUNING_CPP_DATA_MAX);
6838 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
6839 count);
6840 data += count;
6841
6842 count = MIN(metadata->tuning_params.tuning_cac_data_size,
6843 TUNING_CAC_DATA_MAX);
6844 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
6845 count);
6846 data += count;
6847
6848 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
6849 (int32_t *)(void *)tuning_meta_data_blob,
6850 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
6851 }
6852
6853 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
6854 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
6855 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
6856 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
6857 NEUTRAL_COL_POINTS);
6858 }
6859
6860 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
6861 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
6862 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
6863 }
6864
6865 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
6866 int32_t aeRegions[REGIONS_TUPLE_COUNT];
6867 // Adjust crop region from sensor output coordinate system to active
6868 // array coordinate system.
6869 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
6870 hAeRegions->rect.width, hAeRegions->rect.height);
6871
6872 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
6873 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
6874 REGIONS_TUPLE_COUNT);
6875 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6876 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
6877 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
6878 hAeRegions->rect.height);
6879 }
6880
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006881 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
6882 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
6883 if (NAME_NOT_FOUND != val) {
6884 uint8_t fwkAfMode = (uint8_t)val;
6885 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
6886 LOGD("Metadata : ANDROID_CONTROL_AF_MODE %d", val);
6887 } else {
6888 LOGH("Metadata not found : ANDROID_CONTROL_AF_MODE %d",
6889 val);
6890 }
6891 }
6892
Thierry Strudel3d639192016-09-09 11:52:26 -07006893 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
6894 uint8_t fwk_afState = (uint8_t) *afState;
6895 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07006896 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
Thierry Strudel3d639192016-09-09 11:52:26 -07006897 }
6898
6899 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
6900 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
6901 }
6902
6903 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
6904 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
6905 }
6906
6907 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
6908 uint8_t fwk_lensState = *lensState;
6909 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
6910 }
6911
6912 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
6913 /*af regions*/
6914 int32_t afRegions[REGIONS_TUPLE_COUNT];
6915 // Adjust crop region from sensor output coordinate system to active
6916 // array coordinate system.
6917 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
6918 hAfRegions->rect.width, hAfRegions->rect.height);
6919
6920 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
6921 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
6922 REGIONS_TUPLE_COUNT);
6923 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
6924 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
6925 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
6926 hAfRegions->rect.height);
6927 }
6928
6929 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006930 uint32_t ab_mode = *hal_ab_mode;
6931 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
6932 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
6933 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
6934 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006935 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07006936 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07006937 if (NAME_NOT_FOUND != val) {
6938 uint8_t fwk_ab_mode = (uint8_t)val;
6939 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
6940 }
6941 }
6942
6943 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
6944 int val = lookupFwkName(SCENE_MODES_MAP,
6945 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
6946 if (NAME_NOT_FOUND != val) {
6947 uint8_t fwkBestshotMode = (uint8_t)val;
6948 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
6949 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
6950 } else {
6951 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
6952 }
6953 }
6954
6955 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
6956 uint8_t fwk_mode = (uint8_t) *mode;
6957 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
6958 }
6959
6960 /* Constant metadata values to be update*/
6961 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
6962 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
6963
6964 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
6965 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
6966
6967 int32_t hotPixelMap[2];
6968 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
6969
6970 // CDS
6971 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
6972 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
6973 }
6974
Thierry Strudel04e026f2016-10-10 11:27:36 -07006975 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
6976 int32_t fwk_hdr;
6977 if(*vhdr == CAM_SENSOR_HDR_OFF) {
6978 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
6979 } else {
6980 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
6981 }
6982 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
6983 }
6984
6985 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07006986 int32_t fwk_ir = (int32_t) *ir;
6987 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07006988 }
6989
Thierry Strudel269c81a2016-10-12 12:13:59 -07006990 // AEC SPEED
6991 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
6992 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
6993 }
6994
6995 // AWB SPEED
6996 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
6997 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
6998 }
6999
Thierry Strudel3d639192016-09-09 11:52:26 -07007000 // TNR
7001 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7002 uint8_t tnr_enable = tnr->denoise_enable;
7003 int32_t tnr_process_type = (int32_t)tnr->process_plates;
7004
7005 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7006 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7007 }
7008
7009 // Reprocess crop data
7010 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7011 uint8_t cnt = crop_data->num_of_streams;
7012 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7013 // mm-qcamera-daemon only posts crop_data for streams
7014 // not linked to pproc. So no valid crop metadata is not
7015 // necessarily an error case.
7016 LOGD("No valid crop metadata entries");
7017 } else {
7018 uint32_t reproc_stream_id;
7019 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7020 LOGD("No reprocessible stream found, ignore crop data");
7021 } else {
7022 int rc = NO_ERROR;
7023 Vector<int32_t> roi_map;
7024 int32_t *crop = new int32_t[cnt*4];
7025 if (NULL == crop) {
7026 rc = NO_MEMORY;
7027 }
7028 if (NO_ERROR == rc) {
7029 int32_t streams_found = 0;
7030 for (size_t i = 0; i < cnt; i++) {
7031 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7032 if (pprocDone) {
7033 // HAL already does internal reprocessing,
7034 // either via reprocessing before JPEG encoding,
7035 // or offline postprocessing for pproc bypass case.
7036 crop[0] = 0;
7037 crop[1] = 0;
7038 crop[2] = mInputStreamInfo.dim.width;
7039 crop[3] = mInputStreamInfo.dim.height;
7040 } else {
7041 crop[0] = crop_data->crop_info[i].crop.left;
7042 crop[1] = crop_data->crop_info[i].crop.top;
7043 crop[2] = crop_data->crop_info[i].crop.width;
7044 crop[3] = crop_data->crop_info[i].crop.height;
7045 }
7046 roi_map.add(crop_data->crop_info[i].roi_map.left);
7047 roi_map.add(crop_data->crop_info[i].roi_map.top);
7048 roi_map.add(crop_data->crop_info[i].roi_map.width);
7049 roi_map.add(crop_data->crop_info[i].roi_map.height);
7050 streams_found++;
7051 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7052 crop[0], crop[1], crop[2], crop[3]);
7053 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7054 crop_data->crop_info[i].roi_map.left,
7055 crop_data->crop_info[i].roi_map.top,
7056 crop_data->crop_info[i].roi_map.width,
7057 crop_data->crop_info[i].roi_map.height);
7058 break;
7059
7060 }
7061 }
7062 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7063 &streams_found, 1);
7064 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7065 crop, (size_t)(streams_found * 4));
7066 if (roi_map.array()) {
7067 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7068 roi_map.array(), roi_map.size());
7069 }
7070 }
7071 if (crop) {
7072 delete [] crop;
7073 }
7074 }
7075 }
7076 }
7077
7078 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
7079 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
7080 // so hardcoding the CAC result to OFF mode.
7081 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
7082 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
7083 } else {
7084 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
7085 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
7086 *cacMode);
7087 if (NAME_NOT_FOUND != val) {
7088 uint8_t resultCacMode = (uint8_t)val;
7089 // check whether CAC result from CB is equal to Framework set CAC mode
7090 // If not equal then set the CAC mode came in corresponding request
7091 if (fwk_cacMode != resultCacMode) {
7092 resultCacMode = fwk_cacMode;
7093 }
7094 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
7095 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
7096 } else {
7097 LOGE("Invalid CAC camera parameter: %d", *cacMode);
7098 }
7099 }
7100 }
7101
7102 // Post blob of cam_cds_data through vendor tag.
7103 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
7104 uint8_t cnt = cdsInfo->num_of_streams;
7105 cam_cds_data_t cdsDataOverride;
7106 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
7107 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
7108 cdsDataOverride.num_of_streams = 1;
7109 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
7110 uint32_t reproc_stream_id;
7111 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7112 LOGD("No reprocessible stream found, ignore cds data");
7113 } else {
7114 for (size_t i = 0; i < cnt; i++) {
7115 if (cdsInfo->cds_info[i].stream_id ==
7116 reproc_stream_id) {
7117 cdsDataOverride.cds_info[0].cds_enable =
7118 cdsInfo->cds_info[i].cds_enable;
7119 break;
7120 }
7121 }
7122 }
7123 } else {
7124 LOGD("Invalid stream count %d in CDS_DATA", cnt);
7125 }
7126 camMetadata.update(QCAMERA3_CDS_INFO,
7127 (uint8_t *)&cdsDataOverride,
7128 sizeof(cam_cds_data_t));
7129 }
7130
7131 // Ldaf calibration data
7132 if (!mLdafCalibExist) {
7133 IF_META_AVAILABLE(uint32_t, ldafCalib,
7134 CAM_INTF_META_LDAF_EXIF, metadata) {
7135 mLdafCalibExist = true;
7136 mLdafCalib[0] = ldafCalib[0];
7137 mLdafCalib[1] = ldafCalib[1];
7138 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
7139 ldafCalib[0], ldafCalib[1]);
7140 }
7141 }
7142
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007143 // Reprocess and DDM debug data through vendor tag
7144 cam_reprocess_info_t repro_info;
7145 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007146 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
7147 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007148 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007149 }
7150 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
7151 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007152 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007153 }
7154 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
7155 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007156 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007157 }
7158 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
7159 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007160 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007161 }
7162 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
7163 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007164 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007165 }
7166 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007167 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007168 }
7169 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
7170 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007171 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007172 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007173 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
7174 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
7175 }
7176 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
7177 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
7178 }
7179 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
7180 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007181
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007182 // INSTANT AEC MODE
7183 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
7184 CAM_INTF_PARM_INSTANT_AEC, metadata) {
7185 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
7186 }
7187
Shuzhen Wange763e802016-03-31 10:24:29 -07007188 // AF scene change
7189 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
7190 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
7191 }
7192
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007193 /* In batch mode, cache the first metadata in the batch */
7194 if (mBatchSize && firstMetadataInBatch) {
7195 mCachedMetadata.clear();
7196 mCachedMetadata = camMetadata;
7197 }
7198
Thierry Strudel3d639192016-09-09 11:52:26 -07007199 resultMetadata = camMetadata.release();
7200 return resultMetadata;
7201}
7202
7203/*===========================================================================
7204 * FUNCTION : saveExifParams
7205 *
7206 * DESCRIPTION:
7207 *
7208 * PARAMETERS :
7209 * @metadata : metadata information from callback
7210 *
7211 * RETURN : none
7212 *
7213 *==========================================================================*/
7214void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
7215{
7216 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
7217 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
7218 if (mExifParams.debug_params) {
7219 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
7220 mExifParams.debug_params->ae_debug_params_valid = TRUE;
7221 }
7222 }
7223 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
7224 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
7225 if (mExifParams.debug_params) {
7226 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
7227 mExifParams.debug_params->awb_debug_params_valid = TRUE;
7228 }
7229 }
7230 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
7231 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
7232 if (mExifParams.debug_params) {
7233 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
7234 mExifParams.debug_params->af_debug_params_valid = TRUE;
7235 }
7236 }
7237 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
7238 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
7239 if (mExifParams.debug_params) {
7240 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
7241 mExifParams.debug_params->asd_debug_params_valid = TRUE;
7242 }
7243 }
7244 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
7245 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
7246 if (mExifParams.debug_params) {
7247 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
7248 mExifParams.debug_params->stats_debug_params_valid = TRUE;
7249 }
7250 }
7251 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
7252 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
7253 if (mExifParams.debug_params) {
7254 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
7255 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
7256 }
7257 }
7258 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
7259 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
7260 if (mExifParams.debug_params) {
7261 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
7262 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
7263 }
7264 }
7265 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
7266 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
7267 if (mExifParams.debug_params) {
7268 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
7269 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
7270 }
7271 }
7272}
7273
/*===========================================================================
 * FUNCTION   : get3AExifParams
 *
 * DESCRIPTION: Returns the 3A EXIF parameters most recently cached from the
 *              metadata callback (see saveExifParams), for use when encoding
 *              JPEG EXIF data.
 *
 * PARAMETERS : none
 *
 *
 * RETURN     : mm_jpeg_exif_params_t
 *
 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Returned by value, but mExifParams.debug_params is a pointer, so the
    // copy still aliases this object's debug storage — NOTE(review): callers
    // should treat that pointer as borrowed, not owned.
    return mExifParams;
}
7289
/*===========================================================================
 * FUNCTION   : translateCbUrgentMetadataToResultMetadata
 *
 * DESCRIPTION: Translates the "urgent" subset of HAL metadata — 3A states,
 *              AE precapture / AF triggers, AWB and AE modes — from the
 *              metadata callback into framework camera_metadata_t form.
 *              This subset is delivered to the framework ahead of the full
 *              metadata translation.
 *
 * PARAMETERS :
 *   @metadata : metadata information from callback
 *
 * RETURN     : camera_metadata_t*
 *              metadata in a format specified by fwk
 *==========================================================================*/
camera_metadata_t*
QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
                                 (metadata_buffer_t *metadata)
{
    CameraMetadata camMetadata;
    camera_metadata_t *resultMetadata;


    // AWB state: HAL enum value is cast directly to the framework's uint8_t.
    IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
        uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
        camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
    }

    // Echo the AE precapture trigger and its id back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
                &aecTrigger->trigger, 1);
        camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &aecTrigger->trigger_id, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
                 aecTrigger->trigger);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
                aecTrigger->trigger_id);
    }

    IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
        uint8_t fwk_ae_state = (uint8_t) *ae_state;
        camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
    }

    // Echo the AF trigger and its id back to the framework.
    IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
                &af_trigger->trigger, 1);
        LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
                 af_trigger->trigger);
        camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
        LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
                af_trigger->trigger_id);
    }

    // AWB mode: mapped through WHITE_BALANCE_MODES_MAP; unmapped values are
    // only logged, nothing is reported to the framework.
    IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
        if (NAME_NOT_FOUND != val) {
            uint8_t fwkWhiteBalanceMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
            LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
        } else {
            LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
        }
    }

    // ANDROID_CONTROL_AE_MODE has no single HAL counterpart: it is deduced
    // from three independent HAL fields, checked in priority order below:
    // red-eye reduction > flash auto/on > AE on > AE off.
    uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
    uint32_t aeMode = CAM_AE_MODE_MAX;
    int32_t flashMode = CAM_FLASH_MODE_MAX;
    int32_t redeye = -1;
    IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
        aeMode = *pAeMode;
    }
    IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
        flashMode = *pFlashMode;
    }
    IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
        redeye = *pRedeye;
    }

    if (1 == redeye) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
        int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
                flashMode);
        if (NAME_NOT_FOUND != val) {
            fwk_aeMode = (uint8_t)val;
            camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
        } else {
            LOGE("Unsupported flash mode %d", flashMode);
        }
    } else if (aeMode == CAM_AE_MODE_ON) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else if (aeMode == CAM_AE_MODE_OFF) {
        fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
        camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
    } else {
        LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
              "flashMode:%d, aeMode:%u!!!",
                 redeye, flashMode, aeMode);
    }
    // While instant AEC is active, count frames until either AEC settles or
    // the skip-display bound is hit, then arm mResetInstantAEC so instant
    // AEC is turned off on a subsequent request.
    if (mInstantAEC) {
        // Increment frame Idx count until a bound is reached for instant AEC.
        mInstantAecFrameIdxCount++;
        IF_META_AVAILABLE(cam_3a_params_t, ae_params,
                CAM_INTF_META_AEC_INFO, metadata) {
            LOGH("ae_params->settled = %d",ae_params->settled);
            // If AEC settled, or if number of frames reached bound value,
            // should reset instant AEC.
            if (ae_params->settled ||
                    (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
                LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
                mInstantAEC = false;
                mResetInstantAEC = true;
                mInstantAecFrameIdxCount = 0;
            }
        }
    }
    resultMetadata = camMetadata.release();
    return resultMetadata;
}
7411
7412/*===========================================================================
7413 * FUNCTION : dumpMetadataToFile
7414 *
7415 * DESCRIPTION: Dumps tuning metadata to file system
7416 *
7417 * PARAMETERS :
7418 * @meta : tuning metadata
7419 * @dumpFrameCount : current dump frame count
7420 * @enabled : Enable mask
7421 *
7422 *==========================================================================*/
7423void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
7424 uint32_t &dumpFrameCount,
7425 bool enabled,
7426 const char *type,
7427 uint32_t frameNumber)
7428{
7429 //Some sanity checks
7430 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
7431 LOGE("Tuning sensor data size bigger than expected %d: %d",
7432 meta.tuning_sensor_data_size,
7433 TUNING_SENSOR_DATA_MAX);
7434 return;
7435 }
7436
7437 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
7438 LOGE("Tuning VFE data size bigger than expected %d: %d",
7439 meta.tuning_vfe_data_size,
7440 TUNING_VFE_DATA_MAX);
7441 return;
7442 }
7443
7444 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
7445 LOGE("Tuning CPP data size bigger than expected %d: %d",
7446 meta.tuning_cpp_data_size,
7447 TUNING_CPP_DATA_MAX);
7448 return;
7449 }
7450
7451 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
7452 LOGE("Tuning CAC data size bigger than expected %d: %d",
7453 meta.tuning_cac_data_size,
7454 TUNING_CAC_DATA_MAX);
7455 return;
7456 }
7457 //
7458
7459 if(enabled){
7460 char timeBuf[FILENAME_MAX];
7461 char buf[FILENAME_MAX];
7462 memset(buf, 0, sizeof(buf));
7463 memset(timeBuf, 0, sizeof(timeBuf));
7464 time_t current_time;
7465 struct tm * timeinfo;
7466 time (&current_time);
7467 timeinfo = localtime (&current_time);
7468 if (timeinfo != NULL) {
7469 strftime (timeBuf, sizeof(timeBuf),
7470 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
7471 }
7472 String8 filePath(timeBuf);
7473 snprintf(buf,
7474 sizeof(buf),
7475 "%dm_%s_%d.bin",
7476 dumpFrameCount,
7477 type,
7478 frameNumber);
7479 filePath.append(buf);
7480 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
7481 if (file_fd >= 0) {
7482 ssize_t written_len = 0;
7483 meta.tuning_data_version = TUNING_DATA_VERSION;
7484 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
7485 written_len += write(file_fd, data, sizeof(uint32_t));
7486 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
7487 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7488 written_len += write(file_fd, data, sizeof(uint32_t));
7489 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
7490 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7491 written_len += write(file_fd, data, sizeof(uint32_t));
7492 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
7493 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7494 written_len += write(file_fd, data, sizeof(uint32_t));
7495 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
7496 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7497 written_len += write(file_fd, data, sizeof(uint32_t));
7498 meta.tuning_mod3_data_size = 0;
7499 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
7500 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7501 written_len += write(file_fd, data, sizeof(uint32_t));
7502 size_t total_size = meta.tuning_sensor_data_size;
7503 data = (void *)((uint8_t *)&meta.data);
7504 written_len += write(file_fd, data, total_size);
7505 total_size = meta.tuning_vfe_data_size;
7506 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
7507 written_len += write(file_fd, data, total_size);
7508 total_size = meta.tuning_cpp_data_size;
7509 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
7510 written_len += write(file_fd, data, total_size);
7511 total_size = meta.tuning_cac_data_size;
7512 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
7513 written_len += write(file_fd, data, total_size);
7514 close(file_fd);
7515 }else {
7516 LOGE("fail to open file for metadata dumping");
7517 }
7518 }
7519}
7520
7521/*===========================================================================
7522 * FUNCTION : cleanAndSortStreamInfo
7523 *
7524 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
7525 * and sort them such that raw stream is at the end of the list
7526 * This is a workaround for camera daemon constraint.
7527 *
7528 * PARAMETERS : None
7529 *
7530 *==========================================================================*/
7531void QCamera3HardwareInterface::cleanAndSortStreamInfo()
7532{
7533 List<stream_info_t *> newStreamInfo;
7534
7535 /*clean up invalid streams*/
7536 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
7537 it != mStreamInfo.end();) {
7538 if(((*it)->status) == INVALID){
7539 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
7540 delete channel;
7541 free(*it);
7542 it = mStreamInfo.erase(it);
7543 } else {
7544 it++;
7545 }
7546 }
7547
7548 // Move preview/video/callback/snapshot streams into newList
7549 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7550 it != mStreamInfo.end();) {
7551 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
7552 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
7553 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
7554 newStreamInfo.push_back(*it);
7555 it = mStreamInfo.erase(it);
7556 } else
7557 it++;
7558 }
7559 // Move raw streams into newList
7560 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
7561 it != mStreamInfo.end();) {
7562 newStreamInfo.push_back(*it);
7563 it = mStreamInfo.erase(it);
7564 }
7565
7566 mStreamInfo = newStreamInfo;
7567}
7568
7569/*===========================================================================
7570 * FUNCTION : extractJpegMetadata
7571 *
7572 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
7573 * JPEG metadata is cached in HAL, and return as part of capture
7574 * result when metadata is returned from camera daemon.
7575 *
7576 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
7577 * @request: capture request
7578 *
7579 *==========================================================================*/
7580void QCamera3HardwareInterface::extractJpegMetadata(
7581 CameraMetadata& jpegMetadata,
7582 const camera3_capture_request_t *request)
7583{
7584 CameraMetadata frame_settings;
7585 frame_settings = request->settings;
7586
7587 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
7588 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
7589 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
7590 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
7591
7592 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
7593 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
7594 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
7595 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
7596
7597 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
7598 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
7599 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
7600 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
7601
7602 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
7603 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
7604 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
7605 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
7606
7607 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
7608 jpegMetadata.update(ANDROID_JPEG_QUALITY,
7609 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
7610 frame_settings.find(ANDROID_JPEG_QUALITY).count);
7611
7612 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
7613 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
7614 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
7615 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
7616
7617 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
7618 int32_t thumbnail_size[2];
7619 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
7620 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
7621 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
7622 int32_t orientation =
7623 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007624 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07007625 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
7626 int32_t temp;
7627 temp = thumbnail_size[0];
7628 thumbnail_size[0] = thumbnail_size[1];
7629 thumbnail_size[1] = temp;
7630 }
7631 }
7632 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
7633 thumbnail_size,
7634 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
7635 }
7636
7637}
7638
7639/*===========================================================================
7640 * FUNCTION : convertToRegions
7641 *
7642 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
7643 *
7644 * PARAMETERS :
7645 * @rect : cam_rect_t struct to convert
7646 * @region : int32_t destination array
7647 * @weight : if we are converting from cam_area_t, weight is valid
7648 * else weight = -1
7649 *
7650 *==========================================================================*/
7651void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
7652 int32_t *region, int weight)
7653{
7654 region[0] = rect.left;
7655 region[1] = rect.top;
7656 region[2] = rect.left + rect.width;
7657 region[3] = rect.top + rect.height;
7658 if (weight > -1) {
7659 region[4] = weight;
7660 }
7661}
7662
7663/*===========================================================================
7664 * FUNCTION : convertFromRegions
7665 *
7666 * DESCRIPTION: helper method to convert from array to cam_rect_t
7667 *
7668 * PARAMETERS :
7669 * @rect : cam_rect_t struct to convert
7670 * @region : int32_t destination array
7671 * @weight : if we are converting from cam_area_t, weight is valid
7672 * else weight = -1
7673 *
7674 *==========================================================================*/
7675void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
7676 const camera_metadata_t *settings, uint32_t tag)
7677{
7678 CameraMetadata frame_settings;
7679 frame_settings = settings;
7680 int32_t x_min = frame_settings.find(tag).data.i32[0];
7681 int32_t y_min = frame_settings.find(tag).data.i32[1];
7682 int32_t x_max = frame_settings.find(tag).data.i32[2];
7683 int32_t y_max = frame_settings.find(tag).data.i32[3];
7684 roi.weight = frame_settings.find(tag).data.i32[4];
7685 roi.rect.left = x_min;
7686 roi.rect.top = y_min;
7687 roi.rect.width = x_max - x_min;
7688 roi.rect.height = y_max - y_min;
7689}
7690
7691/*===========================================================================
7692 * FUNCTION : resetIfNeededROI
7693 *
7694 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
7695 * crop region
7696 *
7697 * PARAMETERS :
7698 * @roi : cam_area_t struct to resize
7699 * @scalerCropRegion : cam_crop_region_t region to compare against
7700 *
7701 *
7702 *==========================================================================*/
7703bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
7704 const cam_crop_region_t* scalerCropRegion)
7705{
7706 int32_t roi_x_max = roi->rect.width + roi->rect.left;
7707 int32_t roi_y_max = roi->rect.height + roi->rect.top;
7708 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
7709 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
7710
7711 /* According to spec weight = 0 is used to indicate roi needs to be disabled
7712 * without having this check the calculations below to validate if the roi
7713 * is inside scalar crop region will fail resulting in the roi not being
7714 * reset causing algorithm to continue to use stale roi window
7715 */
7716 if (roi->weight == 0) {
7717 return true;
7718 }
7719
7720 if ((roi_x_max < scalerCropRegion->left) ||
7721 // right edge of roi window is left of scalar crop's left edge
7722 (roi_y_max < scalerCropRegion->top) ||
7723 // bottom edge of roi window is above scalar crop's top edge
7724 (roi->rect.left > crop_x_max) ||
7725 // left edge of roi window is beyond(right) of scalar crop's right edge
7726 (roi->rect.top > crop_y_max)){
7727 // top edge of roi windo is above scalar crop's top edge
7728 return false;
7729 }
7730 if (roi->rect.left < scalerCropRegion->left) {
7731 roi->rect.left = scalerCropRegion->left;
7732 }
7733 if (roi->rect.top < scalerCropRegion->top) {
7734 roi->rect.top = scalerCropRegion->top;
7735 }
7736 if (roi_x_max > crop_x_max) {
7737 roi_x_max = crop_x_max;
7738 }
7739 if (roi_y_max > crop_y_max) {
7740 roi_y_max = crop_y_max;
7741 }
7742 roi->rect.width = roi_x_max - roi->rect.left;
7743 roi->rect.height = roi_y_max - roi->rect.top;
7744 return true;
7745}
7746
7747/*===========================================================================
7748 * FUNCTION : convertLandmarks
7749 *
7750 * DESCRIPTION: helper method to extract the landmarks from face detection info
7751 *
7752 * PARAMETERS :
7753 * @landmark_data : input landmark data to be converted
7754 * @landmarks : int32_t destination array
7755 *
7756 *
7757 *==========================================================================*/
7758void QCamera3HardwareInterface::convertLandmarks(
7759 cam_face_landmarks_info_t landmark_data,
7760 int32_t *landmarks)
7761{
Thierry Strudel04e026f2016-10-10 11:27:36 -07007762 if (landmark_data.is_left_eye_valid) {
7763 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
7764 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
7765 } else {
7766 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7767 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7768 }
7769
7770 if (landmark_data.is_right_eye_valid) {
7771 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
7772 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
7773 } else {
7774 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7775 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7776 }
7777
7778 if (landmark_data.is_mouth_valid) {
7779 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
7780 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
7781 } else {
7782 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7783 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
7784 }
7785}
7786
7787/*===========================================================================
7788 * FUNCTION : setInvalidLandmarks
7789 *
7790 * DESCRIPTION: helper method to set invalid landmarks
7791 *
7792 * PARAMETERS :
7793 * @landmarks : int32_t destination array
7794 *
7795 *
7796 *==========================================================================*/
7797void QCamera3HardwareInterface::setInvalidLandmarks(
7798 int32_t *landmarks)
7799{
7800 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
7801 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
7802 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
7803 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
7804 landmarks[MOUTH_X] = FACE_INVALID_POINT;
7805 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07007806}
7807
7808#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007809
/*===========================================================================
 * FUNCTION   : getCapabilities
 *
 * DESCRIPTION: query camera capability from back-end and return a heap copy
 *              of it. The capability buffer is shared with the back-end by
 *              mapping a heap allocation (map_buf), asking the back-end to
 *              fill it (query_capability), then copying it out.
 *
 * PARAMETERS :
 *   @ops        : mm-interface ops structure
 *   @cam_handle : camera handle for which we need capability
 *
 * RETURN     : ptr type of capability structure
 *              capability for success (caller owns it and must free())
 *              NULL for failure
 *==========================================================================*/
cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
        uint32_t cam_handle)
{
    int rc = NO_ERROR;
    QCamera3HeapMemory *capabilityHeap = NULL;
    cam_capability_t *cap_ptr = NULL;

    if (ops == NULL) {
        LOGE("Invalid arguments");
        return NULL;
    }

    capabilityHeap = new QCamera3HeapMemory(1);
    if (capabilityHeap == NULL) {
        LOGE("creation of capabilityHeap failed");
        return NULL;
    }

    /* Allocate memory for capability buffer */
    rc = capabilityHeap->allocate(sizeof(cam_capability_t));
    if(rc != OK) {
        LOGE("No memory for cappability");
        goto allocate_failed;
    }

    /* Map memory for capability buffer */
    memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));

    rc = ops->map_buf(cam_handle,
            CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
            sizeof(cam_capability_t), capabilityHeap->getPtr(0));
    if(rc < 0) {
        LOGE("failed to map capability buffer");
        rc = FAILED_TRANSACTION;
        goto map_failed;
    }

    /* Query Capability: back-end fills the mapped buffer in place */
    rc = ops->query_capability(cam_handle);
    if(rc < 0) {
        LOGE("failed to query capability");
        rc = FAILED_TRANSACTION;
        goto query_failed;
    }

    /* Copy the result out of the shared buffer so the mapping/heap can be
     * torn down before returning. */
    cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
    if (cap_ptr == NULL) {
        LOGE("out of memory");
        rc = NO_MEMORY;
        goto query_failed;
    }

    memset(cap_ptr, 0, sizeof(cam_capability_t));
    memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));

    /* Zero out the analysis padding offsets for every analysis stream type. */
    int index;
    for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
        cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
        p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
        p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
    }

    /* NOTE: the success path intentionally falls through the labels below;
     * unmap/deallocate/delete are the common cleanup for every path. */
query_failed:
    ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
map_failed:
    capabilityHeap->deallocate();
allocate_failed:
    delete capabilityHeap;

    if (rc != NO_ERROR) {
        return NULL;
    } else {
        return cap_ptr;
    }
}
7898
Thierry Strudel3d639192016-09-09 11:52:26 -07007899/*===========================================================================
7900 * FUNCTION : initCapabilities
7901 *
7902 * DESCRIPTION: initialize camera capabilities in static data struct
7903 *
7904 * PARAMETERS :
7905 * @cameraId : camera Id
7906 *
7907 * RETURN : int32_t type of status
7908 * NO_ERROR -- success
7909 * none-zero failure code
7910 *==========================================================================*/
7911int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
7912{
7913 int rc = 0;
7914 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007915 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07007916
7917 rc = camera_open((uint8_t)cameraId, &cameraHandle);
7918 if (rc) {
7919 LOGE("camera_open failed. rc = %d", rc);
7920 goto open_failed;
7921 }
7922 if (!cameraHandle) {
7923 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
7924 goto open_failed;
7925 }
7926
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007927 handle = get_main_camera_handle(cameraHandle->camera_handle);
7928 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
7929 if (gCamCapability[cameraId] == NULL) {
7930 rc = FAILED_TRANSACTION;
7931 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07007932 }
7933
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007934 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007935 if (is_dual_camera_by_idx(cameraId)) {
7936 handle = get_aux_camera_handle(cameraHandle->camera_handle);
7937 gCamCapability[cameraId]->aux_cam_cap =
7938 getCapabilities(cameraHandle->ops, handle);
7939 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
7940 rc = FAILED_TRANSACTION;
7941 free(gCamCapability[cameraId]);
7942 goto failed_op;
7943 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08007944
7945 // Copy the main camera capability to main_cam_cap struct
7946 gCamCapability[cameraId]->main_cam_cap =
7947 (cam_capability_t *)malloc(sizeof(cam_capability_t));
7948 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
7949 LOGE("out of memory");
7950 rc = NO_MEMORY;
7951 goto failed_op;
7952 }
7953 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
7954 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07007955 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007956failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07007957 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
7958 cameraHandle = NULL;
7959open_failed:
7960 return rc;
7961}
7962
7963/*==========================================================================
7964 * FUNCTION : get3Aversion
7965 *
7966 * DESCRIPTION: get the Q3A S/W version
7967 *
7968 * PARAMETERS :
7969 * @sw_version: Reference of Q3A structure which will hold version info upon
7970 * return
7971 *
7972 * RETURN : None
7973 *
7974 *==========================================================================*/
7975void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
7976{
7977 if(gCamCapability[mCameraId])
7978 sw_version = gCamCapability[mCameraId]->q3a_version;
7979 else
7980 LOGE("Capability structure NULL!");
7981}
7982
7983
7984/*===========================================================================
7985 * FUNCTION : initParameters
7986 *
7987 * DESCRIPTION: initialize camera parameters
7988 *
7989 * PARAMETERS :
7990 *
7991 * RETURN : int32_t type of status
7992 * NO_ERROR -- success
7993 * none-zero failure code
7994 *==========================================================================*/
7995int QCamera3HardwareInterface::initParameters()
7996{
7997 int rc = 0;
7998
7999 //Allocate Set Param Buffer
8000 mParamHeap = new QCamera3HeapMemory(1);
8001 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
8002 if(rc != OK) {
8003 rc = NO_MEMORY;
8004 LOGE("Failed to allocate SETPARM Heap memory");
8005 delete mParamHeap;
8006 mParamHeap = NULL;
8007 return rc;
8008 }
8009
8010 //Map memory for parameters buffer
8011 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
8012 CAM_MAPPING_BUF_TYPE_PARM_BUF,
8013 mParamHeap->getFd(0),
8014 sizeof(metadata_buffer_t),
8015 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
8016 if(rc < 0) {
8017 LOGE("failed to map SETPARM buffer");
8018 rc = FAILED_TRANSACTION;
8019 mParamHeap->deallocate();
8020 delete mParamHeap;
8021 mParamHeap = NULL;
8022 return rc;
8023 }
8024
8025 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
8026
8027 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
8028 return rc;
8029}
8030
8031/*===========================================================================
8032 * FUNCTION : deinitParameters
8033 *
8034 * DESCRIPTION: de-initialize camera parameters
8035 *
8036 * PARAMETERS :
8037 *
8038 * RETURN : NONE
8039 *==========================================================================*/
8040void QCamera3HardwareInterface::deinitParameters()
8041{
8042 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
8043 CAM_MAPPING_BUF_TYPE_PARM_BUF);
8044
8045 mParamHeap->deallocate();
8046 delete mParamHeap;
8047 mParamHeap = NULL;
8048
8049 mParameters = NULL;
8050
8051 free(mPrevParameters);
8052 mPrevParameters = NULL;
8053}
8054
8055/*===========================================================================
8056 * FUNCTION : calcMaxJpegSize
8057 *
8058 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
8059 *
8060 * PARAMETERS :
8061 *
8062 * RETURN : max_jpeg_size
8063 *==========================================================================*/
8064size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
8065{
8066 size_t max_jpeg_size = 0;
8067 size_t temp_width, temp_height;
8068 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
8069 MAX_SIZES_CNT);
8070 for (size_t i = 0; i < count; i++) {
8071 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
8072 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
8073 if (temp_width * temp_height > max_jpeg_size ) {
8074 max_jpeg_size = temp_width * temp_height;
8075 }
8076 }
8077 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
8078 return max_jpeg_size;
8079}
8080
8081/*===========================================================================
8082 * FUNCTION : getMaxRawSize
8083 *
8084 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
8085 *
8086 * PARAMETERS :
8087 *
8088 * RETURN : Largest supported Raw Dimension
8089 *==========================================================================*/
8090cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
8091{
8092 int max_width = 0;
8093 cam_dimension_t maxRawSize;
8094
8095 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
8096 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
8097 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
8098 max_width = gCamCapability[camera_id]->raw_dim[i].width;
8099 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
8100 }
8101 }
8102 return maxRawSize;
8103}
8104
8105
8106/*===========================================================================
8107 * FUNCTION : calcMaxJpegDim
8108 *
8109 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
8110 *
8111 * PARAMETERS :
8112 *
8113 * RETURN : max_jpeg_dim
8114 *==========================================================================*/
8115cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
8116{
8117 cam_dimension_t max_jpeg_dim;
8118 cam_dimension_t curr_jpeg_dim;
8119 max_jpeg_dim.width = 0;
8120 max_jpeg_dim.height = 0;
8121 curr_jpeg_dim.width = 0;
8122 curr_jpeg_dim.height = 0;
8123 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
8124 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
8125 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
8126 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
8127 max_jpeg_dim.width * max_jpeg_dim.height ) {
8128 max_jpeg_dim.width = curr_jpeg_dim.width;
8129 max_jpeg_dim.height = curr_jpeg_dim.height;
8130 }
8131 }
8132 return max_jpeg_dim;
8133}
8134
8135/*===========================================================================
8136 * FUNCTION : addStreamConfig
8137 *
8138 * DESCRIPTION: adds the stream configuration to the array
8139 *
8140 * PARAMETERS :
8141 * @available_stream_configs : pointer to stream configuration array
8142 * @scalar_format : scalar format
8143 * @dim : configuration dimension
8144 * @config_type : input or output configuration type
8145 *
8146 * RETURN : NONE
8147 *==========================================================================*/
8148void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
8149 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
8150{
8151 available_stream_configs.add(scalar_format);
8152 available_stream_configs.add(dim.width);
8153 available_stream_configs.add(dim.height);
8154 available_stream_configs.add(config_type);
8155}
8156
8157/*===========================================================================
8158 * FUNCTION : suppportBurstCapture
8159 *
8160 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
8161 *
8162 * PARAMETERS :
8163 * @cameraId : camera Id
8164 *
8165 * RETURN : true if camera supports BURST_CAPTURE
8166 * false otherwise
8167 *==========================================================================*/
8168bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
8169{
8170 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
8171 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
8172 const int32_t highResWidth = 3264;
8173 const int32_t highResHeight = 2448;
8174
8175 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
8176 // Maximum resolution images cannot be captured at >= 10fps
8177 // -> not supporting BURST_CAPTURE
8178 return false;
8179 }
8180
8181 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
8182 // Maximum resolution images can be captured at >= 20fps
8183 // --> supporting BURST_CAPTURE
8184 return true;
8185 }
8186
8187 // Find the smallest highRes resolution, or largest resolution if there is none
8188 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
8189 MAX_SIZES_CNT);
8190 size_t highRes = 0;
8191 while ((highRes + 1 < totalCnt) &&
8192 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
8193 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
8194 highResWidth * highResHeight)) {
8195 highRes++;
8196 }
8197 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
8198 return true;
8199 } else {
8200 return false;
8201 }
8202}
8203
8204/*===========================================================================
8205 * FUNCTION : initStaticMetadata
8206 *
8207 * DESCRIPTION: initialize the static metadata
8208 *
8209 * PARAMETERS :
8210 * @cameraId : camera Id
8211 *
8212 * RETURN : int32_t type of status
8213 * 0 -- success
8214 * non-zero failure code
8215 *==========================================================================*/
8216int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
8217{
8218 int rc = 0;
8219 CameraMetadata staticInfo;
8220 size_t count = 0;
8221 bool limitedDevice = false;
8222 char prop[PROPERTY_VALUE_MAX];
8223 bool supportBurst = false;
8224
8225 supportBurst = supportBurstCapture(cameraId);
8226
8227 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
8228 * guaranteed or if min fps of max resolution is less than 20 fps, its
8229 * advertised as limited device*/
8230 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
8231 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
8232 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
8233 !supportBurst;
8234
8235 uint8_t supportedHwLvl = limitedDevice ?
8236 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008237#ifndef USE_HAL_3_3
8238 // LEVEL_3 - This device will support level 3.
8239 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
8240#else
Thierry Strudel3d639192016-09-09 11:52:26 -07008241 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008242#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008243
8244 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
8245 &supportedHwLvl, 1);
8246
8247 bool facingBack = false;
8248 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
8249 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
8250 facingBack = true;
8251 }
8252 /*HAL 3 only*/
8253 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
8254 &gCamCapability[cameraId]->min_focus_distance, 1);
8255
8256 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
8257 &gCamCapability[cameraId]->hyper_focal_distance, 1);
8258
8259 /*should be using focal lengths but sensor doesn't provide that info now*/
8260 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
8261 &gCamCapability[cameraId]->focal_length,
8262 1);
8263
8264 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
8265 gCamCapability[cameraId]->apertures,
8266 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
8267
8268 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
8269 gCamCapability[cameraId]->filter_densities,
8270 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
8271
8272
8273 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
8274 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
8275 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
8276
8277 int32_t lens_shading_map_size[] = {
8278 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
8279 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
8280 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
8281 lens_shading_map_size,
8282 sizeof(lens_shading_map_size)/sizeof(int32_t));
8283
8284 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
8285 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
8286
8287 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
8288 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
8289
8290 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
8291 &gCamCapability[cameraId]->max_frame_duration, 1);
8292
8293 camera_metadata_rational baseGainFactor = {
8294 gCamCapability[cameraId]->base_gain_factor.numerator,
8295 gCamCapability[cameraId]->base_gain_factor.denominator};
8296 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
8297 &baseGainFactor, 1);
8298
8299 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
8300 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
8301
8302 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
8303 gCamCapability[cameraId]->pixel_array_size.height};
8304 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
8305 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
8306
8307 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
8308 gCamCapability[cameraId]->active_array_size.top,
8309 gCamCapability[cameraId]->active_array_size.width,
8310 gCamCapability[cameraId]->active_array_size.height};
8311 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
8312 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
8313
8314 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
8315 &gCamCapability[cameraId]->white_level, 1);
8316
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008317 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
8318 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
8319 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07008320 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07008321 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07008322
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008323#ifndef USE_HAL_3_3
8324 bool hasBlackRegions = false;
8325 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
8326 LOGW("black_region_count: %d is bounded to %d",
8327 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
8328 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
8329 }
8330 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
8331 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
8332 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
8333 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
8334 }
8335 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
8336 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
8337 hasBlackRegions = true;
8338 }
8339#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008340 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
8341 &gCamCapability[cameraId]->flash_charge_duration, 1);
8342
8343 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
8344 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
8345
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07008346 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
8347 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
8348 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07008349 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
8350 &timestampSource, 1);
8351
8352 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
8353 &gCamCapability[cameraId]->histogram_size, 1);
8354
8355 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
8356 &gCamCapability[cameraId]->max_histogram_count, 1);
8357
8358 int32_t sharpness_map_size[] = {
8359 gCamCapability[cameraId]->sharpness_map_size.width,
8360 gCamCapability[cameraId]->sharpness_map_size.height};
8361
8362 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
8363 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
8364
8365 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
8366 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
8367
8368 int32_t scalar_formats[] = {
8369 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
8370 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
8371 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
8372 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
8373 HAL_PIXEL_FORMAT_RAW10,
8374 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
8375 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
8376 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
8377 scalar_formats,
8378 scalar_formats_count);
8379
8380 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
8381 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
8382 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
8383 count, MAX_SIZES_CNT, available_processed_sizes);
8384 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
8385 available_processed_sizes, count * 2);
8386
8387 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
8388 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
8389 makeTable(gCamCapability[cameraId]->raw_dim,
8390 count, MAX_SIZES_CNT, available_raw_sizes);
8391 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
8392 available_raw_sizes, count * 2);
8393
8394 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
8395 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
8396 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
8397 count, MAX_SIZES_CNT, available_fps_ranges);
8398 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
8399 available_fps_ranges, count * 2);
8400
8401 camera_metadata_rational exposureCompensationStep = {
8402 gCamCapability[cameraId]->exp_compensation_step.numerator,
8403 gCamCapability[cameraId]->exp_compensation_step.denominator};
8404 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
8405 &exposureCompensationStep, 1);
8406
8407 Vector<uint8_t> availableVstabModes;
8408 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
8409 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008410 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07008411 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008412 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07008413 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008414 count = IS_TYPE_MAX;
8415 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
8416 for (size_t i = 0; i < count; i++) {
8417 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
8418 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
8419 eisSupported = true;
8420 break;
8421 }
8422 }
8423 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008424 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
8425 }
8426 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
8427 availableVstabModes.array(), availableVstabModes.size());
8428
8429 /*HAL 1 and HAL 3 common*/
8430 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
8431 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
8432 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
8433 float maxZoom = maxZoomStep/minZoomStep;
8434 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
8435 &maxZoom, 1);
8436
8437 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
8438 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
8439
8440 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
8441 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
8442 max3aRegions[2] = 0; /* AF not supported */
8443 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
8444 max3aRegions, 3);
8445
8446 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
8447 memset(prop, 0, sizeof(prop));
8448 property_get("persist.camera.facedetect", prop, "1");
8449 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
8450 LOGD("Support face detection mode: %d",
8451 supportedFaceDetectMode);
8452
8453 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07008454 /* support mode should be OFF if max number of face is 0 */
8455 if (maxFaces <= 0) {
8456 supportedFaceDetectMode = 0;
8457 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008458 Vector<uint8_t> availableFaceDetectModes;
8459 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
8460 if (supportedFaceDetectMode == 1) {
8461 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8462 } else if (supportedFaceDetectMode == 2) {
8463 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8464 } else if (supportedFaceDetectMode == 3) {
8465 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
8466 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
8467 } else {
8468 maxFaces = 0;
8469 }
8470 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
8471 availableFaceDetectModes.array(),
8472 availableFaceDetectModes.size());
8473 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
8474 (int32_t *)&maxFaces, 1);
8475
8476 int32_t exposureCompensationRange[] = {
8477 gCamCapability[cameraId]->exposure_compensation_min,
8478 gCamCapability[cameraId]->exposure_compensation_max};
8479 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
8480 exposureCompensationRange,
8481 sizeof(exposureCompensationRange)/sizeof(int32_t));
8482
8483 uint8_t lensFacing = (facingBack) ?
8484 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
8485 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
8486
8487 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
8488 available_thumbnail_sizes,
8489 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
8490
8491 /*all sizes will be clubbed into this tag*/
    count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
    /*android.scaler.availableStreamConfigurations*/
    // Flattened (format, width, height, direction) tuples; addStreamConfig()
    // appends one 4-entry tuple per supported configuration.
    Vector<int32_t> available_stream_configs;
    cam_dimension_t active_array_dim;
    active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
    active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
    // NOTE(review): active_array_dim is not referenced in this section —
    // presumably consumed further below; confirm before removing.
    /* Add input/output stream configurations for each scalar formats*/
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            // RAW formats: only the sensor's native raw dimensions are valid outputs.
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->raw_dim[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            // JPEG: every entry of the picture size table is a valid output.
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            cam_dimension_t largest_picture_size;
            memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        gCamCapability[cameraId]->picture_sizes_tbl[i],
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
                /* Book keep largest */
                // Dimension must dominate in BOTH width and height to replace
                // the current maximum.
                if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
                        >= largest_picture_size.width &&
                        gCamCapability[cameraId]->picture_sizes_tbl[i].height
                        >= largest_picture_size.height)
                    largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
            }
            /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
            if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
                    scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
                // Only the single largest size is advertised as a reprocess INPUT.
                addStreamConfig(available_stream_configs, scalar_formats[j],
                        largest_picture_size,
                        ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
            }
            break;
        }
    }
8546
    staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            available_stream_configs.array(), available_stream_configs.size());
    // Hot-pixel correction defaults: FAST correction, map reporting off.
    static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
    staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);

    static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
    staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);

    /* android.scaler.availableMinFrameDurations */
    // Flattened (format, width, height, min_duration_ns) tuples. RAW formats
    // use the raw dimension/duration tables; everything else uses the
    // picture size/duration tables.
    Vector<int64_t> available_min_durations;
    for (size_t j = 0; j < scalar_formats_count; j++) {
        switch (scalar_formats[j]) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
                available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
                available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
            }
            break;
        default:
            for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
                    gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
                available_min_durations.add(scalar_formats[j]);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
                available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
                available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
            }
            break;
        }
    }
    staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
            available_min_durations.array(), available_min_durations.size());
8583
    // Build ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS from the
    // HAL HFR table: map each CAM_HFR_MODE_* enum to its numeric fps, then
    // emit two (w, h, fps_min, fps_max, batch_size_max) tuples per dimension.
    Vector<int32_t> available_hfr_configs;
    for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
        int32_t fps = 0;
        switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
        case CAM_HFR_MODE_60FPS:
            fps = 60;
            break;
        case CAM_HFR_MODE_90FPS:
            fps = 90;
            break;
        case CAM_HFR_MODE_120FPS:
            fps = 120;
            break;
        case CAM_HFR_MODE_150FPS:
            fps = 150;
            break;
        case CAM_HFR_MODE_180FPS:
            fps = 180;
            break;
        case CAM_HFR_MODE_210FPS:
            fps = 210;
            break;
        case CAM_HFR_MODE_240FPS:
            fps = 240;
            break;
        case CAM_HFR_MODE_480FPS:
            fps = 480;
            break;
        case CAM_HFR_MODE_OFF:
        case CAM_HFR_MODE_MAX:
        default:
            // fps stays 0 and the mode is skipped by the check below.
            break;
        }

        /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
        if (fps >= MIN_FPS_FOR_BATCH_MODE) {
            /* For each HFR frame rate, need to advertise one variable fps range
             * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
             * and [120, 120]. While camcorder preview alone is running [30, 120] is
             * set by the app. When video recording is started, [120, 120] is
             * set. This way sensor configuration does not change when recording
             * is started */

            /* (width, height, fps_min, fps_max, batch_size_max) */
            for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
                    j < MAX_SIZES_CNT; j++) {
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);

                /* (width, height, fps_min, fps_max, batch_size_max) */
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
                available_hfr_configs.add(
                        gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps);
                available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
            }
        }
    }
    //Advertise HFR capability only if the property is set
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.hal3hfr.enable", prop, "1");
    uint8_t hfrEnable = (uint8_t)atoi(prop);

    // NOTE(review): emptiness is inferred from array() being non-NULL;
    // checking size() > 0 would express the intent directly — confirm
    // Vector::array() returns NULL for an empty vector on this branch.
    if(hfrEnable && available_hfr_configs.array()) {
        staticInfo.update(
                ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
                available_hfr_configs.array(), available_hfr_configs.size());
    }
8659
    // Maximum JPEG buffer size (bytes) for the largest picture size.
    int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
    staticInfo.update(ANDROID_JPEG_MAX_SIZE,
            &max_jpeg_size, 1);

    // android.control.availableEffects: translate each HAL effect enum to its
    // framework counterpart; unmapped entries are silently dropped.
    uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
    size_t size = 0;
    count = CAM_EFFECT_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
                gCamCapability[cameraId]->supported_effects[i]);
        if (NAME_NOT_FOUND != val) {
            avail_effects[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
            avail_effects,
            size);
8679
8680 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
8681 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
8682 size_t supported_scene_modes_cnt = 0;
8683 count = CAM_SCENE_MODE_MAX;
8684 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
8685 for (size_t i = 0; i < count; i++) {
8686 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
8687 CAM_SCENE_MODE_OFF) {
8688 int val = lookupFwkName(SCENE_MODES_MAP,
8689 METADATA_MAP_SIZE(SCENE_MODES_MAP),
8690 gCamCapability[cameraId]->supported_scene_modes[i]);
8691 if (NAME_NOT_FOUND != val) {
8692 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
8693 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
8694 supported_scene_modes_cnt++;
8695 }
8696 }
8697 }
8698 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
8699 avail_scene_modes,
8700 supported_scene_modes_cnt);
8701
8702 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
8703 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
8704 supported_scene_modes_cnt,
8705 CAM_SCENE_MODE_MAX,
8706 scene_mode_overrides,
8707 supported_indexes,
8708 cameraId);
8709
8710 if (supported_scene_modes_cnt == 0) {
8711 supported_scene_modes_cnt = 1;
8712 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
8713 }
8714
8715 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
8716 scene_mode_overrides, supported_scene_modes_cnt * 3);
8717
    // android.control.availableModes: OFF/AUTO always; USE_SCENE_MODE is
    // advertised unconditionally here (scene mode list is published above).
    uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
            ANDROID_CONTROL_MODE_AUTO,
            ANDROID_CONTROL_MODE_USE_SCENE_MODE};
    staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
            available_control_modes,
            3);

    // android.control.aeAvailableAntibandingModes: HAL -> framework mapping,
    // unmapped entries dropped.
    uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
    size = 0;
    count = CAM_ANTIBANDING_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
                gCamCapability[cameraId]->supported_antibandings[i]);
        if (NAME_NOT_FOUND != val) {
            avail_antibanding_modes[size] = (uint8_t)val;
            size++;
        }

    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
            avail_antibanding_modes,
            size);

    // android.colorCorrection.availableAberrationModes: only the first `size`
    // entries of this fixed array are published.
    uint8_t avail_abberation_modes[] = {
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
    count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
    count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
    if (0 == count) {
        // If no aberration correction modes are available for a device, this advertise OFF mode
        size = 1;
    } else {
        // If count is not zero then atleast one among the FAST or HIGH quality is supported
        // So, advertize all 3 modes if atleast any one mode is supported as per the
        // new M requirement
        size = 3;
    }
    staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            avail_abberation_modes,
            size);

    // android.control.afAvailableModes: HAL -> framework mapping.
    uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
    size = 0;
    count = CAM_FOCUS_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                gCamCapability[cameraId]->supported_focus_modes[i]);
        if (NAME_NOT_FOUND != val) {
            avail_af_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
            avail_af_modes,
            size);

    // android.control.awbAvailableModes: HAL -> framework mapping.
    uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
    size = 0;
    count = CAM_WB_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
    for (size_t i = 0; i < count; i++) {
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                gCamCapability[cameraId]->supported_white_balances[i]);
        if (NAME_NOT_FOUND != val) {
            avail_awb_modes[size] = (uint8_t)val;
            size++;
        }
    }
    staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
            avail_awb_modes,
            size);

    // android.flash.firingPower: raw HAL firing levels, copied verbatim
    // (no framework mapping exists for these).
    uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
    count = CAM_FLASH_FIRING_LEVEL_MAX;
    count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
            count);
    for (size_t i = 0; i < count; i++) {
        available_flash_levels[i] =
                gCamCapability[cameraId]->supported_firing_levels[i];
    }
    staticInfo.update(ANDROID_FLASH_FIRING_POWER,
            available_flash_levels, count);
8804
    // android.flash.info.available, then AE modes (flash AE modes are only
    // appended when the unit actually has a flash).
    uint8_t flashAvailable;
    if (gCamCapability[cameraId]->flash_available)
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
    else
        flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
    staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
            &flashAvailable, 1);

    Vector<uint8_t> avail_ae_modes;
    count = CAM_AE_MODE_MAX;
    count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
    for (size_t i = 0; i < count; i++) {
        avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
    }
    // Relies on ANDROID_FLASH_INFO_AVAILABLE_FALSE being 0 so the enum can be
    // used directly as a boolean.
    if (flashAvailable) {
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
        avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
    }
    staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
            avail_ae_modes.array(),
            avail_ae_modes.size());

    // Sensor sensitivity range [min, max] and max analog sensitivity.
    int32_t sensitivity_range[2];
    sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
    sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
            sensitivity_range,
            sizeof(sensitivity_range) / sizeof(int32_t));

    staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
            &gCamCapability[cameraId]->max_analog_sensitivity,
            1);

    // Sensor mount angle doubles as the clockwise orientation in degrees.
    int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
    staticInfo.update(ANDROID_SENSOR_ORIENTATION,
            &sensor_orientation,
            1);

    // Max simultaneous output streams: (stalling, processed, raw).
    int32_t max_output_streams[] = {
            MAX_STALLING_STREAMS,
            MAX_PROCESSED_STREAMS,
            MAX_RAW_STREAMS};
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
            max_output_streams,
            sizeof(max_output_streams)/sizeof(max_output_streams[0]));

    // No app-controllable LEDs: publish the tag with a count of 0.
    uint8_t avail_leds = 0;
    staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
            &avail_leds, 0);

    // Focus distance calibration quality (tag skipped when no mapping exists).
    uint8_t focus_dist_calibrated;
    int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
            gCamCapability[cameraId]->focus_dist_calibrated);
    if (NAME_NOT_FOUND != val) {
        focus_dist_calibrated = (uint8_t)val;
        staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
                &focus_dist_calibrated, 1);
    }
8863
    // android.sensor.availableTestPatternModes: HAL -> framework mapping.
    int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
    size = 0;
    count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
            MAX_TEST_PATTERN_CNT);
    for (size_t i = 0; i < count; i++) {
        int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
                gCamCapability[cameraId]->supported_test_pattern_modes[i]);
        if (NAME_NOT_FOUND != testpatternMode) {
            avail_testpattern_modes[size] = testpatternMode;
            size++;
        }
    }
    staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
            avail_testpattern_modes,
            size);

    // Worst-case number of frames a request can be in flight through the HAL.
    uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
    staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
            &max_pipeline_depth,
            1);

    int32_t partial_result_count = PARTIAL_RESULT_COUNT;
    staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
            &partial_result_count,
            1);

    int32_t max_stall_duration = MAX_REPROCESS_STALL;
    staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);

    // android.request.availableCapabilities: unconditional capabilities first,
    // then conditional ones (burst, constrained high-speed, RAW).
    Vector<uint8_t> available_capabilities;
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
    if (supportBurst) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
    }
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
    available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
    // Same gate as the HIGH_SPEED_VIDEO_CONFIGURATIONS update above: both the
    // property and a non-empty HFR table are required.
    if (hfrEnable && available_hfr_configs.array()) {
        available_capabilities.add(
                ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
    }

    if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
        available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
    }
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            available_capabilities.array(),
            available_capabilities.size());

    //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
    //Assumption is that all bayer cameras support MANUAL_SENSOR.
    uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            &aeLockAvailable, 1);

    //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
    //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
    uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;

    staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            &awbLockAvailable, 1);

    // Single reprocess input stream supported.
    int32_t max_input_streams = 1;
    staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
            &max_input_streams,
            1);

    /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
    int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
            HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
            HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
            HAL_PIXEL_FORMAT_YCbCr_420_888};
    staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
            io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));

    int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
    staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
            &max_latency,
            1);
8948
#ifndef USE_HAL_3_3
    // Post-RAW sensitivity boost range (HAL3.4+ only): taken from the ISP
    // sensitivity capability.
    int32_t isp_sensitivity_range[2];
    isp_sensitivity_range[0] =
            gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
    isp_sensitivity_range[1] =
            gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
    staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
            isp_sensitivity_range,
            sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
#endif

    // Fixed (capability-independent) mode lists advertised for every camera.
    uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
            ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            available_hot_pixel_modes,
            sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));

    uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
            ANDROID_SHADING_MODE_FAST,
            ANDROID_SHADING_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
            available_shading_modes,
            3);

    uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
            available_lens_shading_map_modes,
            2);

    uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
            ANDROID_EDGE_MODE_FAST,
            ANDROID_EDGE_MODE_HIGH_QUALITY,
            ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            available_edge_modes,
            sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));

    uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
            ANDROID_NOISE_REDUCTION_MODE_FAST,
            ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
            ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
            ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
    staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            available_noise_red_modes,
            sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));

    uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
            ANDROID_TONEMAP_MODE_FAST,
            ANDROID_TONEMAP_MODE_HIGH_QUALITY};
    staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            available_tonemap_modes,
            sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));

    uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
    staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            available_hot_pixel_map_modes,
            sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
9007
    // Reference illuminants for DNG color calibration; each tag is only
    // published when the HAL value maps to a framework enum. Reuses the `val`
    // declared for the focus-calibration lookup above.
    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant1);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
    }

    val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
            gCamCapability[cameraId]->reference_illuminant2);
    if (NAME_NOT_FOUND != val) {
        uint8_t fwkReferenceIlluminant = (uint8_t)val;
        staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
    }

    // Forward/color/calibration matrices. The HAL stores these in a layout
    // cast directly to camera_metadata_rational_t; the (void *) hop silences
    // the strict-aliasing/type mismatch between the HAL and metadata types.
    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix1,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->forward_matrix2,
            FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform1,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->color_transform2,
            COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform1,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);

    staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
            (void *)gCamCapability[cameraId]->calibration_transform2,
            CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
9045
    // android.request.availableRequestKeys: the baseline set of controls an
    // app may put in a capture request; AF regions appended only when more
    // than one focus mode exists (i.e. the lens can actually move).
    int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
            ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
            ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
            ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
            ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
            ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
            ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
            ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
            ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
            ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
            ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
            ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
            ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
            ANDROID_JPEG_GPS_COORDINATES,
            ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
            ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
            ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
            ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
            ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
            ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
            ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
            ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
            ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
            ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
#ifndef USE_HAL_3_3
            ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
            ANDROID_STATISTICS_FACE_DETECT_MODE,
            ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
            ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
            ANDROID_BLACK_LEVEL_LOCK,
            /* DevCamDebug metadata request_keys_basic */
            DEVCAMDEBUG_META_ENABLE,
            /* DevCamDebug metadata end */
            };

    size_t request_keys_cnt =
            sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
    Vector<int32_t> available_request_keys;
    available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }

    staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
            available_request_keys.array(), available_request_keys.size());
9093
    // android.request.availableResultKeys: baseline result keys, extended
    // conditionally below (AF regions, RAW-only keys, face-detect keys,
    // dynamic black/white levels).
    int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
            ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
            ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
            ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
            ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
            ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
            ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
            ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
            ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
            ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
            ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
            ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
            ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
            ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
            ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
            ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
            ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
            ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
            ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
            ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
            ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
            ANDROID_STATISTICS_FACE_SCORES,
#ifndef USE_HAL_3_3
            ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
#endif
            NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
            // DevCamDebug metadata result_keys_basic
            DEVCAMDEBUG_META_ENABLE,
            // DevCamDebug metadata result_keys AF
            DEVCAMDEBUG_AF_LENS_POSITION,
            DEVCAMDEBUG_AF_TOF_CONFIDENCE,
            DEVCAMDEBUG_AF_TOF_DISTANCE,
            DEVCAMDEBUG_AF_LUMA,
            DEVCAMDEBUG_AF_HAF_STATE,
            DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
            DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
            DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
            DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
            DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
            DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
            DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
            DEVCAMDEBUG_AF_MONITOR_REFOCUS,
            DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
            DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
            DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
            DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
            DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
            DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
            DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
            DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
            DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
            DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
            // DevCamDebug metadata result_keys AEC
            DEVCAMDEBUG_AEC_TARGET_LUMA,
            DEVCAMDEBUG_AEC_COMP_LUMA,
            DEVCAMDEBUG_AEC_AVG_LUMA,
            DEVCAMDEBUG_AEC_CUR_LUMA,
            DEVCAMDEBUG_AEC_LINECOUNT,
            DEVCAMDEBUG_AEC_REAL_GAIN,
            DEVCAMDEBUG_AEC_EXP_INDEX,
            DEVCAMDEBUG_AEC_LUX_IDX,
            // DevCamDebug metadata result_keys AWB
            DEVCAMDEBUG_AWB_R_GAIN,
            DEVCAMDEBUG_AWB_G_GAIN,
            DEVCAMDEBUG_AWB_B_GAIN,
            DEVCAMDEBUG_AWB_CCT,
            DEVCAMDEBUG_AWB_DECISION,
            /* DevCamDebug metadata end */
            };

    size_t result_keys_cnt =
            sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);

    Vector<int32_t> available_result_keys;
    available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
    if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
        available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
    }
    // Noise profile / green split only make sense for Bayer (RAW) sensors.
    if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
        available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
        available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
    }
    // Face-detect keys scale with the supported detect mode:
    // 1 = SIMPLE (rects + scores), 2/3 = FULL (ids + landmarks as well).
    if (supportedFaceDetectMode == 1) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
        available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
    } else if ((supportedFaceDetectMode == 2) ||
            (supportedFaceDetectMode == 3)) {
        available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
        available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
    }
#ifndef USE_HAL_3_3
    if (hasBlackRegions) {
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
        available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
    }
#endif
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            available_result_keys.array(), available_result_keys.size());
9196
    // android.request.availableCharacteristicsKeys: every static tag this
    // function publishes; optical-black regions added only when present.
    int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
            ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
            ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
            ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
            ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
            ANDROID_SCALER_CROPPING_TYPE,
            ANDROID_SYNC_MAX_LATENCY,
            ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
            ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
            ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
            ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
            ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
            ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
            ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
            ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
            ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
            ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
            ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
            ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
            ANDROID_LENS_FACING,
            ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
            ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
            ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
            ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
            ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
            ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
            ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
            /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
            ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
            ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
            ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
            ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
            ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
            ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
            ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
            ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
            ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
            ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
            ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
            ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
            ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
            ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
            ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
            ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
            ANDROID_EDGE_AVAILABLE_EDGE_MODES,
            ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
            ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
            ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
            ANDROID_TONEMAP_MAX_CURVE_POINTS,
            ANDROID_CONTROL_AVAILABLE_MODES,
            ANDROID_CONTROL_AE_LOCK_AVAILABLE,
            ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
            ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
            ANDROID_SHADING_AVAILABLE_MODES,
            ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
#ifndef USE_HAL_3_3
            ANDROID_SENSOR_OPAQUE_RAW_SIZE,
            ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
#endif
            };

    Vector<int32_t> available_characteristics_keys;
    available_characteristics_keys.appendArray(characteristics_keys_basic,
            sizeof(characteristics_keys_basic)/sizeof(int32_t));
#ifndef USE_HAL_3_3
    if (hasBlackRegions) {
        available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
    }
#endif
    staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
            available_characteristics_keys.array(),
            available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07009269
9270 /*available stall durations depend on the hw + sw and will be different for different devices */
9271 /*have to add for raw after implementation*/
9272 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
9273 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
9274
9275 Vector<int64_t> available_stall_durations;
9276 for (uint32_t j = 0; j < stall_formats_count; j++) {
9277 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
9278 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9279 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9280 available_stall_durations.add(stall_formats[j]);
9281 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9282 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9283 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
9284 }
9285 } else {
9286 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
9287 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9288 available_stall_durations.add(stall_formats[j]);
9289 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9290 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9291 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
9292 }
9293 }
9294 }
9295 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
9296 available_stall_durations.array(),
9297 available_stall_durations.size());
9298
9299 //QCAMERA3_OPAQUE_RAW
9300 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9301 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9302 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
9303 case LEGACY_RAW:
9304 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9305 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
9306 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9307 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
9308 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9309 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
9310 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
9311 break;
9312 case MIPI_RAW:
9313 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
9314 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
9315 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
9316 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
9317 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
9318 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
9319 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
9320 break;
9321 default:
9322 LOGE("unknown opaque_raw_format %d",
9323 gCamCapability[cameraId]->opaque_raw_fmt);
9324 break;
9325 }
9326 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
9327
9328 Vector<int32_t> strides;
9329 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9330 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9331 cam_stream_buf_plane_info_t buf_planes;
9332 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
9333 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
9334 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9335 &gCamCapability[cameraId]->padding_info, &buf_planes);
9336 strides.add(buf_planes.plane_info.mp[0].stride);
9337 }
9338 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
9339 strides.size());
9340
Thierry Strudel04e026f2016-10-10 11:27:36 -07009341 //Video HDR default
9342 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
9343 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
9344 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
9345 int32_t vhdr_mode[] = {
9346 QCAMERA3_VIDEO_HDR_MODE_OFF,
9347 QCAMERA3_VIDEO_HDR_MODE_ON};
9348
9349 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
9350 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
9351 vhdr_mode, vhdr_mode_count);
9352 }
9353
Thierry Strudel3d639192016-09-09 11:52:26 -07009354 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
9355 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
9356 sizeof(gCamCapability[cameraId]->related_cam_calibration));
9357
9358 uint8_t isMonoOnly =
9359 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
9360 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
9361 &isMonoOnly, 1);
9362
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009363#ifndef USE_HAL_3_3
9364 Vector<int32_t> opaque_size;
9365 for (size_t j = 0; j < scalar_formats_count; j++) {
9366 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
9367 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9368 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9369 cam_stream_buf_plane_info_t buf_planes;
9370
9371 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
9372 &gCamCapability[cameraId]->padding_info, &buf_planes);
9373
9374 if (rc == 0) {
9375 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
9376 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
9377 opaque_size.add(buf_planes.plane_info.frame_len);
9378 }else {
9379 LOGE("raw frame calculation failed!");
9380 }
9381 }
9382 }
9383 }
9384
9385 if ((opaque_size.size() > 0) &&
9386 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
9387 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
9388 else
9389 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
9390#endif
9391
Thierry Strudel04e026f2016-10-10 11:27:36 -07009392 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
9393 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
9394 size = 0;
9395 count = CAM_IR_MODE_MAX;
9396 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
9397 for (size_t i = 0; i < count; i++) {
9398 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
9399 gCamCapability[cameraId]->supported_ir_modes[i]);
9400 if (NAME_NOT_FOUND != val) {
9401 avail_ir_modes[size] = (int32_t)val;
9402 size++;
9403 }
9404 }
9405 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
9406 avail_ir_modes, size);
9407 }
9408
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009409 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
9410 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
9411 size = 0;
9412 count = CAM_AEC_CONVERGENCE_MAX;
9413 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
9414 for (size_t i = 0; i < count; i++) {
9415 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
9416 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
9417 if (NAME_NOT_FOUND != val) {
9418 available_instant_aec_modes[size] = (int32_t)val;
9419 size++;
9420 }
9421 }
9422 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
9423 available_instant_aec_modes, size);
9424 }
9425
Thierry Strudel3d639192016-09-09 11:52:26 -07009426 gStaticMetadata[cameraId] = staticInfo.release();
9427 return rc;
9428}
9429
9430/*===========================================================================
9431 * FUNCTION : makeTable
9432 *
9433 * DESCRIPTION: make a table of sizes
9434 *
9435 * PARAMETERS :
9436 *
9437 *
9438 *==========================================================================*/
9439void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
9440 size_t max_size, int32_t *sizeTable)
9441{
9442 size_t j = 0;
9443 if (size > max_size) {
9444 size = max_size;
9445 }
9446 for (size_t i = 0; i < size; i++) {
9447 sizeTable[j] = dimTable[i].width;
9448 sizeTable[j+1] = dimTable[i].height;
9449 j+=2;
9450 }
9451}
9452
9453/*===========================================================================
9454 * FUNCTION : makeFPSTable
9455 *
9456 * DESCRIPTION: make a table of fps ranges
9457 *
9458 * PARAMETERS :
9459 *
9460 *==========================================================================*/
9461void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
9462 size_t max_size, int32_t *fpsRangesTable)
9463{
9464 size_t j = 0;
9465 if (size > max_size) {
9466 size = max_size;
9467 }
9468 for (size_t i = 0; i < size; i++) {
9469 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
9470 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
9471 j+=2;
9472 }
9473}
9474
9475/*===========================================================================
9476 * FUNCTION : makeOverridesList
9477 *
9478 * DESCRIPTION: make a list of scene mode overrides
9479 *
9480 * PARAMETERS :
9481 *
9482 *
9483 *==========================================================================*/
void QCamera3HardwareInterface::makeOverridesList(
        cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
        uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
{
    /*daemon will give a list of overrides for all scene modes.
    However we should send the fwk only the overrides for the scene modes
    supported by the framework*/
    // Output layout: 3 bytes per supported scene mode ->
    // {AE mode, AWB mode, AF mode}. supported_indexes[i] maps the i-th
    // framework scene mode back to its slot in the daemon's overridesTable.
    size_t j = 0;
    if (size > max_size) {
        // Clamp to the caller-provided capacity of overridesList.
        size = max_size;
    }
    size_t focus_count = CAM_FOCUS_MODE_MAX;
    focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
            focus_count);
    for (size_t i = 0; i < size; i++) {
        bool supt = false;
        size_t index = supported_indexes[i];
        // AE override: prefer auto-flash whenever the sensor has a flash unit.
        overridesList[j] = gCamCapability[camera_id]->flash_available ?
                ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
        // AWB override: translate the HAL awb mode to the framework enum.
        // NOTE(review): if the lookup fails, overridesList[j+1] is left
        // unwritten (stale) — presumably the caller zero-initializes the
        // buffer; confirm against the call site.
        int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
                METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
                overridesTable[index].awb_mode);
        if (NAME_NOT_FOUND != val) {
            overridesList[j+1] = (uint8_t)val;
        }
        // AF override: only report the daemon's af_mode if the sensor
        // actually supports that focus mode; otherwise fall back to OFF.
        uint8_t focus_override = overridesTable[index].af_mode;
        for (size_t k = 0; k < focus_count; k++) {
            if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
                supt = true;
                break;
            }
        }
        if (supt) {
            val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
                    focus_override);
            if (NAME_NOT_FOUND != val) {
                overridesList[j+2] = (uint8_t)val;
            }
        } else {
            overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
        }
        j+=3;
    }
}
9528
9529/*===========================================================================
9530 * FUNCTION : filterJpegSizes
9531 *
9532 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
9533 * could be downscaled to
9534 *
9535 * PARAMETERS :
9536 *
9537 * RETURN : length of jpegSizes array
9538 *==========================================================================*/
9539
9540size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
9541 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
9542 uint8_t downscale_factor)
9543{
9544 if (0 == downscale_factor) {
9545 downscale_factor = 1;
9546 }
9547
9548 int32_t min_width = active_array_size.width / downscale_factor;
9549 int32_t min_height = active_array_size.height / downscale_factor;
9550 size_t jpegSizesCnt = 0;
9551 if (processedSizesCnt > maxCount) {
9552 processedSizesCnt = maxCount;
9553 }
9554 for (size_t i = 0; i < processedSizesCnt; i+=2) {
9555 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
9556 jpegSizes[jpegSizesCnt] = processedSizes[i];
9557 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
9558 jpegSizesCnt += 2;
9559 }
9560 }
9561 return jpegSizesCnt;
9562}
9563
9564/*===========================================================================
9565 * FUNCTION : computeNoiseModelEntryS
9566 *
9567 * DESCRIPTION: function to map a given sensitivity to the S noise
9568 * model parameters in the DNG noise model.
9569 *
9570 * PARAMETERS : sens : the sensor sensitivity
9571 *
9572 ** RETURN : S (sensor amplification) noise
9573 *
9574 *==========================================================================*/
9575double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
9576 double s = gCamCapability[mCameraId]->gradient_S * sens +
9577 gCamCapability[mCameraId]->offset_S;
9578 return ((s < 0.0) ? 0.0 : s);
9579}
9580
9581/*===========================================================================
9582 * FUNCTION : computeNoiseModelEntryO
9583 *
9584 * DESCRIPTION: function to map a given sensitivity to the O noise
9585 * model parameters in the DNG noise model.
9586 *
9587 * PARAMETERS : sens : the sensor sensitivity
9588 *
9589 ** RETURN : O (sensor readout) noise
9590 *
9591 *==========================================================================*/
9592double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
9593 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
9594 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
9595 1.0 : (1.0 * sens / max_analog_sens);
9596 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
9597 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
9598 return ((o < 0.0) ? 0.0 : o);
9599}
9600
9601/*===========================================================================
9602 * FUNCTION : getSensorSensitivity
9603 *
9604 * DESCRIPTION: convert iso_mode to an integer value
9605 *
9606 * PARAMETERS : iso_mode : the iso_mode supported by sensor
9607 *
9608 ** RETURN : sensitivity supported by sensor
9609 *
9610 *==========================================================================*/
9611int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
9612{
9613 int32_t sensitivity;
9614
9615 switch (iso_mode) {
9616 case CAM_ISO_MODE_100:
9617 sensitivity = 100;
9618 break;
9619 case CAM_ISO_MODE_200:
9620 sensitivity = 200;
9621 break;
9622 case CAM_ISO_MODE_400:
9623 sensitivity = 400;
9624 break;
9625 case CAM_ISO_MODE_800:
9626 sensitivity = 800;
9627 break;
9628 case CAM_ISO_MODE_1600:
9629 sensitivity = 1600;
9630 break;
9631 default:
9632 sensitivity = -1;
9633 break;
9634 }
9635 return sensitivity;
9636}
9637
9638/*===========================================================================
9639 * FUNCTION : getCamInfo
9640 *
9641 * DESCRIPTION: query camera capabilities
9642 *
9643 * PARAMETERS :
9644 * @cameraId : camera Id
9645 * @info : camera info struct to be filled in with camera capabilities
9646 *
9647 * RETURN : int type of status
9648 * NO_ERROR -- success
9649 * none-zero failure code
9650 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
    int rc = 0;

    // gCamLock serializes the lazy, one-time initialization of the global
    // capability and static-metadata tables across concurrent callers.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            // Must release the lock on every early-return path.
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Both AUX positions report the same facing as their main counterpart.
    // On an unknown position, rc is set to -1 but the remaining fields are
    // still filled in before returning the error.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    // HAL 3.4 when building against the newer framework, else HAL 3.3.
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    // m is estimated as max processed streams * active-array pixels * the
    // highest advertised fps; M is the capability-reported pixel bandwidth.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
9725
9726/*===========================================================================
9727 * FUNCTION : translateCapabilityToMetadata
9728 *
9729 * DESCRIPTION: translate the capability into camera_metadata_t
9730 *
9731 * PARAMETERS : type of the request
9732 *
9733 *
9734 * RETURN : success: camera_metadata_t*
9735 * failure: NULL
9736 *
9737 *==========================================================================*/
9738camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
9739{
9740 if (mDefaultMetadata[type] != NULL) {
9741 return mDefaultMetadata[type];
9742 }
9743 //first time we are handling this request
9744 //fill up the metadata structure using the wrapper class
9745 CameraMetadata settings;
9746 //translate from cam_capability_t to camera_metadata_tag_t
9747 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
9748 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
9749 int32_t defaultRequestID = 0;
9750 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
9751
9752 /* OIS disable */
9753 char ois_prop[PROPERTY_VALUE_MAX];
9754 memset(ois_prop, 0, sizeof(ois_prop));
9755 property_get("persist.camera.ois.disable", ois_prop, "0");
9756 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
9757
9758 /* Force video to use OIS */
9759 char videoOisProp[PROPERTY_VALUE_MAX];
9760 memset(videoOisProp, 0, sizeof(videoOisProp));
9761 property_get("persist.camera.ois.video", videoOisProp, "1");
9762 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -08009763
9764 // Hybrid AE enable/disable
9765 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
9766 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
9767 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
9768 const uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
9769
Thierry Strudel3d639192016-09-09 11:52:26 -07009770 uint8_t controlIntent = 0;
9771 uint8_t focusMode;
9772 uint8_t vsMode;
9773 uint8_t optStabMode;
9774 uint8_t cacMode;
9775 uint8_t edge_mode;
9776 uint8_t noise_red_mode;
9777 uint8_t tonemap_mode;
9778 bool highQualityModeEntryAvailable = FALSE;
9779 bool fastModeEntryAvailable = FALSE;
9780 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
9781 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9782 switch (type) {
9783 case CAMERA3_TEMPLATE_PREVIEW:
9784 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
9785 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9786 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9787 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9788 edge_mode = ANDROID_EDGE_MODE_FAST;
9789 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9790 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9791 break;
9792 case CAMERA3_TEMPLATE_STILL_CAPTURE:
9793 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
9794 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9795 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9796 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
9797 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
9798 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
9799 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9800 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
9801 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9802 if (gCamCapability[mCameraId]->aberration_modes[i] ==
9803 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9804 highQualityModeEntryAvailable = TRUE;
9805 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
9806 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9807 fastModeEntryAvailable = TRUE;
9808 }
9809 }
9810 if (highQualityModeEntryAvailable) {
9811 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
9812 } else if (fastModeEntryAvailable) {
9813 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9814 }
9815 break;
9816 case CAMERA3_TEMPLATE_VIDEO_RECORD:
9817 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
9818 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9819 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009820 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9821 edge_mode = ANDROID_EDGE_MODE_FAST;
9822 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9823 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9824 if (forceVideoOis)
9825 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9826 break;
9827 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
9828 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
9829 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
9830 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07009831 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9832 edge_mode = ANDROID_EDGE_MODE_FAST;
9833 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9834 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9835 if (forceVideoOis)
9836 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9837 break;
9838 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
9839 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
9840 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9841 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9842 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9843 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
9844 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
9845 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9846 break;
9847 case CAMERA3_TEMPLATE_MANUAL:
9848 edge_mode = ANDROID_EDGE_MODE_FAST;
9849 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9850 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9851 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9852 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
9853 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9854 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9855 break;
9856 default:
9857 edge_mode = ANDROID_EDGE_MODE_FAST;
9858 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
9859 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
9860 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
9861 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
9862 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
9863 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9864 break;
9865 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07009866 // Set CAC to OFF if underlying device doesn't support
9867 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9868 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
9869 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009870 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
9871 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
9872 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
9873 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
9874 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
9875 }
9876 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
9877
9878 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9879 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
9880 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
9881 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
9882 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
9883 || ois_disable)
9884 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
9885 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
9886
9887 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
9888 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
9889
9890 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
9891 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
9892
9893 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
9894 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
9895
9896 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
9897 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
9898
9899 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
9900 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
9901
9902 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
9903 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
9904
9905 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
9906 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
9907
9908 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
9909 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
9910
9911 /*flash*/
9912 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
9913 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
9914
9915 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
9916 settings.update(ANDROID_FLASH_FIRING_POWER,
9917 &flashFiringLevel, 1);
9918
9919 /* lens */
9920 float default_aperture = gCamCapability[mCameraId]->apertures[0];
9921 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
9922
9923 if (gCamCapability[mCameraId]->filter_densities_count) {
9924 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
9925 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
9926 gCamCapability[mCameraId]->filter_densities_count);
9927 }
9928
9929 float default_focal_length = gCamCapability[mCameraId]->focal_length;
9930 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
9931
9932 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
9933 float default_focus_distance = 0;
9934 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
9935 }
9936
9937 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
9938 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
9939
9940 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9941 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9942
9943 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
9944 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
9945
9946 /* face detection (default to OFF) */
9947 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
9948 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
9949
9950 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
9951 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
9952
9953 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
9954 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
9955
9956 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9957 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9958
9959 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
9960 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
9961
9962 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
9963 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
9964
9965 /* Exposure time(Update the Min Exposure Time)*/
9966 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
9967 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
9968
9969 /* frame duration */
9970 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
9971 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
9972
9973 /* sensitivity */
9974 static const int32_t default_sensitivity = 100;
9975 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009976#ifndef USE_HAL_3_3
9977 static const int32_t default_isp_sensitivity =
9978 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9979 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
9980#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009981
9982 /*edge mode*/
9983 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
9984
9985 /*noise reduction mode*/
9986 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
9987
9988 /*color correction mode*/
9989 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
9990 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
9991
9992 /*transform matrix mode*/
9993 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
9994
9995 int32_t scaler_crop_region[4];
9996 scaler_crop_region[0] = 0;
9997 scaler_crop_region[1] = 0;
9998 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
9999 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
10000 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
10001
10002 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
10003 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
10004
10005 /*focus distance*/
10006 float focus_distance = 0.0;
10007 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
10008
10009 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010010 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070010011 float max_range = 0.0;
10012 float max_fixed_fps = 0.0;
10013 int32_t fps_range[2] = {0, 0};
10014 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
10015 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080010016 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
10017 TEMPLATE_MAX_PREVIEW_FPS) {
10018 continue;
10019 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010020 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
10021 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10022 if (type == CAMERA3_TEMPLATE_PREVIEW ||
10023 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
10024 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
10025 if (range > max_range) {
10026 fps_range[0] =
10027 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10028 fps_range[1] =
10029 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10030 max_range = range;
10031 }
10032 } else {
10033 if (range < 0.01 && max_fixed_fps <
10034 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
10035 fps_range[0] =
10036 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
10037 fps_range[1] =
10038 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10039 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
10040 }
10041 }
10042 }
10043 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
10044
10045 /*precapture trigger*/
10046 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
10047 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
10048
10049 /*af trigger*/
10050 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
10051 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
10052
10053 /* ae & af regions */
10054 int32_t active_region[] = {
10055 gCamCapability[mCameraId]->active_array_size.left,
10056 gCamCapability[mCameraId]->active_array_size.top,
10057 gCamCapability[mCameraId]->active_array_size.left +
10058 gCamCapability[mCameraId]->active_array_size.width,
10059 gCamCapability[mCameraId]->active_array_size.top +
10060 gCamCapability[mCameraId]->active_array_size.height,
10061 0};
10062 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
10063 sizeof(active_region) / sizeof(active_region[0]));
10064 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
10065 sizeof(active_region) / sizeof(active_region[0]));
10066
10067 /* black level lock */
10068 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
10069 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
10070
10071 /* lens shading map mode */
10072 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
10073 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
10074 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
10075 }
10076 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
10077
10078 //special defaults for manual template
10079 if (type == CAMERA3_TEMPLATE_MANUAL) {
10080 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
10081 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
10082
10083 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
10084 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
10085
10086 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
10087 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
10088
10089 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
10090 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
10091
10092 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
10093 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
10094
10095 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
10096 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
10097 }
10098
10099
10100 /* TNR
10101 * We'll use this location to determine which modes TNR will be set.
10102 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
10103 * This is not to be confused with linking on a per stream basis that decision
10104 * is still on per-session basis and will be handled as part of config stream
10105 */
10106 uint8_t tnr_enable = 0;
10107
10108 if (m_bTnrPreview || m_bTnrVideo) {
10109
10110 switch (type) {
10111 case CAMERA3_TEMPLATE_VIDEO_RECORD:
10112 tnr_enable = 1;
10113 break;
10114
10115 default:
10116 tnr_enable = 0;
10117 break;
10118 }
10119
10120 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
10121 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
10122 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
10123
10124 LOGD("TNR:%d with process plate %d for template:%d",
10125 tnr_enable, tnr_process_type, type);
10126 }
10127
10128 //Update Link tags to default
10129 int32_t sync_type = CAM_TYPE_STANDALONE;
10130 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
10131
10132 int32_t is_main = 0; //this doesn't matter as app should overwrite
10133 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
10134
10135 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
10136
10137 /* CDS default */
10138 char prop[PROPERTY_VALUE_MAX];
10139 memset(prop, 0, sizeof(prop));
10140 property_get("persist.camera.CDS", prop, "Auto");
10141 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
10142 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
10143 if (CAM_CDS_MODE_MAX == cds_mode) {
10144 cds_mode = CAM_CDS_MODE_AUTO;
10145 }
10146
10147 /* Disabling CDS in templates which have TNR enabled*/
10148 if (tnr_enable)
10149 cds_mode = CAM_CDS_MODE_OFF;
10150
10151 int32_t mode = cds_mode;
10152 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070010153
10154 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
10155 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
10156
10157 /* IR Mode Default Off */
10158 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
10159 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
10160
Thierry Strudel269c81a2016-10-12 12:13:59 -070010161 /* Manual Convergence AEC Speed is disabled by default*/
10162 float default_aec_speed = 0;
10163 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
10164
10165 /* Manual Convergence AWB Speed is disabled by default*/
10166 float default_awb_speed = 0;
10167 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
10168
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010169 // Set instant AEC to normal convergence by default
10170 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
10171 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
10172
Shuzhen Wang19463d72016-03-08 11:09:52 -080010173 /* hybrid ae */
10174 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
10175
Thierry Strudel3d639192016-09-09 11:52:26 -070010176 mDefaultMetadata[type] = settings.release();
10177
10178 return mDefaultMetadata[type];
10179}
10180
10181/*===========================================================================
10182 * FUNCTION : setFrameParameters
10183 *
10184 * DESCRIPTION: set parameters per frame as requested in the metadata from
10185 * framework
10186 *
10187 * PARAMETERS :
10188 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010189 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070010190 * @blob_request: Whether this request is a blob request or not
10191 *
10192 * RETURN : success: NO_ERROR
10193 * failure:
10194 *==========================================================================*/
10195int QCamera3HardwareInterface::setFrameParameters(
10196 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010197 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070010198 int blob_request,
10199 uint32_t snapshotStreamId)
10200{
10201 /*translate from camera_metadata_t type to parm_type_t*/
10202 int rc = 0;
10203 int32_t hal_version = CAM_HAL_V3;
10204
10205 clear_metadata_buffer(mParameters);
10206 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
10207 LOGE("Failed to set hal version in the parameters");
10208 return BAD_VALUE;
10209 }
10210
10211 /*we need to update the frame number in the parameters*/
10212 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
10213 request->frame_number)) {
10214 LOGE("Failed to set the frame number in the parameters");
10215 return BAD_VALUE;
10216 }
10217
10218 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080010219 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070010220 LOGE("Failed to set stream type mask in the parameters");
10221 return BAD_VALUE;
10222 }
10223
10224 if (mUpdateDebugLevel) {
10225 uint32_t dummyDebugLevel = 0;
10226 /* The value of dummyDebugLevel is irrelavent. On
10227 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
10228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
10229 dummyDebugLevel)) {
10230 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
10231 return BAD_VALUE;
10232 }
10233 mUpdateDebugLevel = false;
10234 }
10235
10236 if(request->settings != NULL){
10237 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
10238 if (blob_request)
10239 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
10240 }
10241
10242 return rc;
10243}
10244
10245/*===========================================================================
10246 * FUNCTION : setReprocParameters
10247 *
10248 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
10249 * return it.
10250 *
10251 * PARAMETERS :
10252 * @request : request that needs to be serviced
10253 *
10254 * RETURN : success: NO_ERROR
10255 * failure:
10256 *==========================================================================*/
10257int32_t QCamera3HardwareInterface::setReprocParameters(
10258 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
10259 uint32_t snapshotStreamId)
10260{
10261 /*translate from camera_metadata_t type to parm_type_t*/
10262 int rc = 0;
10263
10264 if (NULL == request->settings){
10265 LOGE("Reprocess settings cannot be NULL");
10266 return BAD_VALUE;
10267 }
10268
10269 if (NULL == reprocParam) {
10270 LOGE("Invalid reprocessing metadata buffer");
10271 return BAD_VALUE;
10272 }
10273 clear_metadata_buffer(reprocParam);
10274
10275 /*we need to update the frame number in the parameters*/
10276 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
10277 request->frame_number)) {
10278 LOGE("Failed to set the frame number in the parameters");
10279 return BAD_VALUE;
10280 }
10281
10282 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
10283 if (rc < 0) {
10284 LOGE("Failed to translate reproc request");
10285 return rc;
10286 }
10287
10288 CameraMetadata frame_settings;
10289 frame_settings = request->settings;
10290 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
10291 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
10292 int32_t *crop_count =
10293 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
10294 int32_t *crop_data =
10295 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
10296 int32_t *roi_map =
10297 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
10298 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
10299 cam_crop_data_t crop_meta;
10300 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
10301 crop_meta.num_of_streams = 1;
10302 crop_meta.crop_info[0].crop.left = crop_data[0];
10303 crop_meta.crop_info[0].crop.top = crop_data[1];
10304 crop_meta.crop_info[0].crop.width = crop_data[2];
10305 crop_meta.crop_info[0].crop.height = crop_data[3];
10306
10307 crop_meta.crop_info[0].roi_map.left =
10308 roi_map[0];
10309 crop_meta.crop_info[0].roi_map.top =
10310 roi_map[1];
10311 crop_meta.crop_info[0].roi_map.width =
10312 roi_map[2];
10313 crop_meta.crop_info[0].roi_map.height =
10314 roi_map[3];
10315
10316 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
10317 rc = BAD_VALUE;
10318 }
10319 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
10320 request->input_buffer->stream,
10321 crop_meta.crop_info[0].crop.left,
10322 crop_meta.crop_info[0].crop.top,
10323 crop_meta.crop_info[0].crop.width,
10324 crop_meta.crop_info[0].crop.height);
10325 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
10326 request->input_buffer->stream,
10327 crop_meta.crop_info[0].roi_map.left,
10328 crop_meta.crop_info[0].roi_map.top,
10329 crop_meta.crop_info[0].roi_map.width,
10330 crop_meta.crop_info[0].roi_map.height);
10331 } else {
10332 LOGE("Invalid reprocess crop count %d!", *crop_count);
10333 }
10334 } else {
10335 LOGE("No crop data from matching output stream");
10336 }
10337
10338 /* These settings are not needed for regular requests so handle them specially for
10339 reprocess requests; information needed for EXIF tags */
10340 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10341 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10342 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10343 if (NAME_NOT_FOUND != val) {
10344 uint32_t flashMode = (uint32_t)val;
10345 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
10346 rc = BAD_VALUE;
10347 }
10348 } else {
10349 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
10350 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10351 }
10352 } else {
10353 LOGH("No flash mode in reprocess settings");
10354 }
10355
10356 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
10357 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
10358 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
10359 rc = BAD_VALUE;
10360 }
10361 } else {
10362 LOGH("No flash state in reprocess settings");
10363 }
10364
10365 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
10366 uint8_t *reprocessFlags =
10367 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
10368 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
10369 *reprocessFlags)) {
10370 rc = BAD_VALUE;
10371 }
10372 }
10373
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010374 // Add metadata which reprocess needs
10375 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
10376 cam_reprocess_info_t *repro_info =
10377 (cam_reprocess_info_t *)frame_settings.find
10378 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070010379 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010380 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010381 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010382 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010383 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010384 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010385 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010386 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010387 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010388 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070010389 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010390 repro_info->pipeline_flip);
10391 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
10392 repro_info->af_roi);
10393 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
10394 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070010395 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
10396 CAM_INTF_PARM_ROTATION metadata then has been added in
10397 translateToHalMetadata. HAL need to keep this new rotation
10398 metadata. Otherwise, the old rotation info saved in the vendor tag
10399 would be used */
10400 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
10401 CAM_INTF_PARM_ROTATION, reprocParam) {
10402 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
10403 } else {
10404 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070010405 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070010406 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010407 }
10408
10409 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
10410 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
10411 roi.width and roi.height would be the final JPEG size.
10412 For now, HAL only checks this for reprocess request */
10413 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
10414 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
10415 uint8_t *enable =
10416 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
10417 if (*enable == TRUE) {
10418 int32_t *crop_data =
10419 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
10420 cam_stream_crop_info_t crop_meta;
10421 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
10422 crop_meta.stream_id = 0;
10423 crop_meta.crop.left = crop_data[0];
10424 crop_meta.crop.top = crop_data[1];
10425 crop_meta.crop.width = crop_data[2];
10426 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010427 // The JPEG crop roi should match cpp output size
10428 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
10429 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
10430 crop_meta.roi_map.left = 0;
10431 crop_meta.roi_map.top = 0;
10432 crop_meta.roi_map.width = cpp_crop->crop.width;
10433 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070010434 }
10435 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
10436 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010437 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010438 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010439 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
10440 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070010441 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010442 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
10443
10444 // Add JPEG scale information
10445 cam_dimension_t scale_dim;
10446 memset(&scale_dim, 0, sizeof(cam_dimension_t));
10447 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
10448 int32_t *roi =
10449 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
10450 scale_dim.width = roi[2];
10451 scale_dim.height = roi[3];
10452 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
10453 scale_dim);
10454 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
10455 scale_dim.width, scale_dim.height, mCameraId);
10456 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010457 }
10458 }
10459
10460 return rc;
10461}
10462
10463/*===========================================================================
10464 * FUNCTION : saveRequestSettings
10465 *
10466 * DESCRIPTION: Add any settings that might have changed to the request settings
10467 * and save the settings to be applied on the frame
10468 *
10469 * PARAMETERS :
10470 * @jpegMetadata : the extracted and/or modified jpeg metadata
10471 * @request : request with initial settings
10472 *
10473 * RETURN :
10474 * camera_metadata_t* : pointer to the saved request settings
10475 *==========================================================================*/
10476camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
10477 const CameraMetadata &jpegMetadata,
10478 camera3_capture_request_t *request)
10479{
10480 camera_metadata_t *resultMetadata;
10481 CameraMetadata camMetadata;
10482 camMetadata = request->settings;
10483
10484 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
10485 int32_t thumbnail_size[2];
10486 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
10487 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
10488 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
10489 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
10490 }
10491
10492 if (request->input_buffer != NULL) {
10493 uint8_t reprocessFlags = 1;
10494 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
10495 (uint8_t*)&reprocessFlags,
10496 sizeof(reprocessFlags));
10497 }
10498
10499 resultMetadata = camMetadata.release();
10500 return resultMetadata;
10501}
10502
10503/*===========================================================================
10504 * FUNCTION : setHalFpsRange
10505 *
10506 * DESCRIPTION: set FPS range parameter
10507 *
10508 *
10509 * PARAMETERS :
10510 * @settings : Metadata from framework
10511 * @hal_metadata: Metadata buffer
10512 *
10513 *
10514 * RETURN : success: NO_ERROR
10515 * failure:
10516 *==========================================================================*/
10517int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
10518 metadata_buffer_t *hal_metadata)
10519{
10520 int32_t rc = NO_ERROR;
10521 cam_fps_range_t fps_range;
10522 fps_range.min_fps = (float)
10523 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
10524 fps_range.max_fps = (float)
10525 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
10526 fps_range.video_min_fps = fps_range.min_fps;
10527 fps_range.video_max_fps = fps_range.max_fps;
10528
10529 LOGD("aeTargetFpsRange fps: [%f %f]",
10530 fps_range.min_fps, fps_range.max_fps);
10531 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
10532 * follows:
10533 * ---------------------------------------------------------------|
10534 * Video stream is absent in configure_streams |
10535 * (Camcorder preview before the first video record |
10536 * ---------------------------------------------------------------|
10537 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10538 * | | | vid_min/max_fps|
10539 * ---------------------------------------------------------------|
10540 * NO | [ 30, 240] | 240 | [240, 240] |
10541 * |-------------|-------------|----------------|
10542 * | [240, 240] | 240 | [240, 240] |
10543 * ---------------------------------------------------------------|
10544 * Video stream is present in configure_streams |
10545 * ---------------------------------------------------------------|
10546 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
10547 * | | | vid_min/max_fps|
10548 * ---------------------------------------------------------------|
10549 * NO | [ 30, 240] | 240 | [240, 240] |
10550 * (camcorder prev |-------------|-------------|----------------|
10551 * after video rec | [240, 240] | 240 | [240, 240] |
10552 * is stopped) | | | |
10553 * ---------------------------------------------------------------|
10554 * YES | [ 30, 240] | 240 | [240, 240] |
10555 * |-------------|-------------|----------------|
10556 * | [240, 240] | 240 | [240, 240] |
10557 * ---------------------------------------------------------------|
10558 * When Video stream is absent in configure_streams,
10559 * preview fps = sensor_fps / batchsize
10560 * Eg: for 240fps at batchSize 4, preview = 60fps
10561 * for 120fps at batchSize 4, preview = 30fps
10562 *
10563 * When video stream is present in configure_streams, preview fps is as per
10564 * the ratio of preview buffers to video buffers requested in process
10565 * capture request
10566 */
10567 mBatchSize = 0;
10568 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
10569 fps_range.min_fps = fps_range.video_max_fps;
10570 fps_range.video_min_fps = fps_range.video_max_fps;
10571 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
10572 fps_range.max_fps);
10573 if (NAME_NOT_FOUND != val) {
10574 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
10575 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10576 return BAD_VALUE;
10577 }
10578
10579 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
10580 /* If batchmode is currently in progress and the fps changes,
10581 * set the flag to restart the sensor */
10582 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
10583 (mHFRVideoFps != fps_range.max_fps)) {
10584 mNeedSensorRestart = true;
10585 }
10586 mHFRVideoFps = fps_range.max_fps;
10587 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
10588 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
10589 mBatchSize = MAX_HFR_BATCH_SIZE;
10590 }
10591 }
10592 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
10593
10594 }
10595 } else {
10596 /* HFR mode is session param in backend/ISP. This should be reset when
10597 * in non-HFR mode */
10598 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
10599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
10600 return BAD_VALUE;
10601 }
10602 }
10603 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
10604 return BAD_VALUE;
10605 }
10606 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
10607 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
10608 return rc;
10609}
10610
10611/*===========================================================================
10612 * FUNCTION : translateToHalMetadata
10613 *
10614 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
10615 *
10616 *
10617 * PARAMETERS :
10618 * @request : request sent from framework
10619 *
10620 *
10621 * RETURN : success: NO_ERROR
10622 * failure:
10623 *==========================================================================*/
10624int QCamera3HardwareInterface::translateToHalMetadata
10625 (const camera3_capture_request_t *request,
10626 metadata_buffer_t *hal_metadata,
10627 uint32_t snapshotStreamId)
10628{
10629 int rc = 0;
10630 CameraMetadata frame_settings;
10631 frame_settings = request->settings;
10632
10633 /* Do not change the order of the following list unless you know what you are
10634 * doing.
10635 * The order is laid out in such a way that parameters in the front of the table
10636 * may be used to override the parameters later in the table. Examples are:
10637 * 1. META_MODE should precede AEC/AWB/AF MODE
10638 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
10639 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
10640 * 4. Any mode should precede its corresponding settings
10641 */
10642 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
10643 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
10644 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
10645 rc = BAD_VALUE;
10646 }
10647 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
10648 if (rc != NO_ERROR) {
10649 LOGE("extractSceneMode failed");
10650 }
10651 }
10652
10653 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10654 uint8_t fwk_aeMode =
10655 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10656 uint8_t aeMode;
10657 int32_t redeye;
10658
10659 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
10660 aeMode = CAM_AE_MODE_OFF;
10661 } else {
10662 aeMode = CAM_AE_MODE_ON;
10663 }
10664 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
10665 redeye = 1;
10666 } else {
10667 redeye = 0;
10668 }
10669
10670 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
10671 fwk_aeMode);
10672 if (NAME_NOT_FOUND != val) {
10673 int32_t flashMode = (int32_t)val;
10674 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
10675 }
10676
10677 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
10678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
10679 rc = BAD_VALUE;
10680 }
10681 }
10682
10683 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
10684 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
10685 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10686 fwk_whiteLevel);
10687 if (NAME_NOT_FOUND != val) {
10688 uint8_t whiteLevel = (uint8_t)val;
10689 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
10690 rc = BAD_VALUE;
10691 }
10692 }
10693 }
10694
10695 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
10696 uint8_t fwk_cacMode =
10697 frame_settings.find(
10698 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
10699 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
10700 fwk_cacMode);
10701 if (NAME_NOT_FOUND != val) {
10702 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
10703 bool entryAvailable = FALSE;
10704 // Check whether Frameworks set CAC mode is supported in device or not
10705 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
10706 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
10707 entryAvailable = TRUE;
10708 break;
10709 }
10710 }
10711 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
10712 // If entry not found then set the device supported mode instead of frameworks mode i.e,
10713 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
10714 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
10715 if (entryAvailable == FALSE) {
10716 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
10717 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10718 } else {
10719 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
10720 // High is not supported and so set the FAST as spec say's underlying
10721 // device implementation can be the same for both modes.
10722 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
10723 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
10724 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
10725 // in order to avoid the fps drop due to high quality
10726 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10727 } else {
10728 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
10729 }
10730 }
10731 }
10732 LOGD("Final cacMode is %d", cacMode);
10733 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
10734 rc = BAD_VALUE;
10735 }
10736 } else {
10737 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
10738 }
10739 }
10740
10741 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
10742 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
10743 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10744 fwk_focusMode);
10745 if (NAME_NOT_FOUND != val) {
10746 uint8_t focusMode = (uint8_t)val;
10747 LOGD("set focus mode %d", focusMode);
10748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
10749 rc = BAD_VALUE;
10750 }
10751 }
10752 }
10753
10754 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
10755 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
10756 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
10757 focalDistance)) {
10758 rc = BAD_VALUE;
10759 }
10760 }
10761
10762 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
10763 uint8_t fwk_antibandingMode =
10764 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
10765 int val = lookupHalName(ANTIBANDING_MODES_MAP,
10766 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
10767 if (NAME_NOT_FOUND != val) {
10768 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070010769 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
10770 if (m60HzZone) {
10771 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
10772 } else {
10773 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
10774 }
10775 }
Thierry Strudel3d639192016-09-09 11:52:26 -070010776 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
10777 hal_antibandingMode)) {
10778 rc = BAD_VALUE;
10779 }
10780 }
10781 }
10782
10783 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
10784 int32_t expCompensation = frame_settings.find(
10785 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
10786 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
10787 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
10788 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
10789 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Zhijun He426c4d92016-12-16 14:27:50 -080010790 ALOGV("CAM_DEBUG: Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070010791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
10792 expCompensation)) {
10793 rc = BAD_VALUE;
10794 }
10795 }
10796
10797 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
10798 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
10799 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
10800 rc = BAD_VALUE;
10801 }
10802 }
10803 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
10804 rc = setHalFpsRange(frame_settings, hal_metadata);
10805 if (rc != NO_ERROR) {
10806 LOGE("setHalFpsRange failed");
10807 }
10808 }
10809
10810 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
10811 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
10812 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
10813 rc = BAD_VALUE;
10814 }
10815 }
10816
10817 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
10818 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
10819 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
10820 fwk_effectMode);
10821 if (NAME_NOT_FOUND != val) {
10822 uint8_t effectMode = (uint8_t)val;
10823 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
10824 rc = BAD_VALUE;
10825 }
10826 }
10827 }
10828
10829 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
10830 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
10831 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
10832 colorCorrectMode)) {
10833 rc = BAD_VALUE;
10834 }
10835 }
10836
10837 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
10838 cam_color_correct_gains_t colorCorrectGains;
10839 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
10840 colorCorrectGains.gains[i] =
10841 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
10842 }
10843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
10844 colorCorrectGains)) {
10845 rc = BAD_VALUE;
10846 }
10847 }
10848
10849 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
10850 cam_color_correct_matrix_t colorCorrectTransform;
10851 cam_rational_type_t transform_elem;
10852 size_t num = 0;
10853 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
10854 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
10855 transform_elem.numerator =
10856 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
10857 transform_elem.denominator =
10858 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
10859 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
10860 num++;
10861 }
10862 }
10863 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
10864 colorCorrectTransform)) {
10865 rc = BAD_VALUE;
10866 }
10867 }
10868
10869 cam_trigger_t aecTrigger;
10870 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
10871 aecTrigger.trigger_id = -1;
10872 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
10873 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
10874 aecTrigger.trigger =
10875 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
10876 aecTrigger.trigger_id =
10877 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
10878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
10879 aecTrigger)) {
10880 rc = BAD_VALUE;
10881 }
10882 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
10883 aecTrigger.trigger, aecTrigger.trigger_id);
10884 }
10885
10886 /*af_trigger must come with a trigger id*/
10887 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
10888 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
10889 cam_trigger_t af_trigger;
10890 af_trigger.trigger =
10891 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
10892 af_trigger.trigger_id =
10893 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
10894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
10895 rc = BAD_VALUE;
10896 }
10897 LOGD("AfTrigger: %d AfTriggerID: %d",
10898 af_trigger.trigger, af_trigger.trigger_id);
10899 }
10900
10901 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
10902 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
10903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
10904 rc = BAD_VALUE;
10905 }
10906 }
10907 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
10908 cam_edge_application_t edge_application;
10909 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
10910 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
10911 edge_application.sharpness = 0;
10912 } else {
10913 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
10914 }
10915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
10916 rc = BAD_VALUE;
10917 }
10918 }
10919
10920 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
10921 int32_t respectFlashMode = 1;
10922 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
10923 uint8_t fwk_aeMode =
10924 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
10925 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
10926 respectFlashMode = 0;
10927 LOGH("AE Mode controls flash, ignore android.flash.mode");
10928 }
10929 }
10930 if (respectFlashMode) {
10931 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
10932 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
10933 LOGH("flash mode after mapping %d", val);
10934 // To check: CAM_INTF_META_FLASH_MODE usage
10935 if (NAME_NOT_FOUND != val) {
10936 uint8_t flashMode = (uint8_t)val;
10937 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
10938 rc = BAD_VALUE;
10939 }
10940 }
10941 }
10942 }
10943
10944 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
10945 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
10946 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
10947 rc = BAD_VALUE;
10948 }
10949 }
10950
10951 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
10952 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
10953 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
10954 flashFiringTime)) {
10955 rc = BAD_VALUE;
10956 }
10957 }
10958
10959 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
10960 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
10961 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
10962 hotPixelMode)) {
10963 rc = BAD_VALUE;
10964 }
10965 }
10966
10967 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
10968 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
10969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
10970 lensAperture)) {
10971 rc = BAD_VALUE;
10972 }
10973 }
10974
10975 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
10976 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
10977 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
10978 filterDensity)) {
10979 rc = BAD_VALUE;
10980 }
10981 }
10982
10983 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
10984 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
10985 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
10986 focalLength)) {
10987 rc = BAD_VALUE;
10988 }
10989 }
10990
10991 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
10992 uint8_t optStabMode =
10993 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
10994 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
10995 optStabMode)) {
10996 rc = BAD_VALUE;
10997 }
10998 }
10999
11000 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
11001 uint8_t videoStabMode =
11002 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
11003 LOGD("videoStabMode from APP = %d", videoStabMode);
11004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
11005 videoStabMode)) {
11006 rc = BAD_VALUE;
11007 }
11008 }
11009
11010
11011 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
11012 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
11013 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
11014 noiseRedMode)) {
11015 rc = BAD_VALUE;
11016 }
11017 }
11018
11019 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
11020 float reprocessEffectiveExposureFactor =
11021 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
11022 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
11023 reprocessEffectiveExposureFactor)) {
11024 rc = BAD_VALUE;
11025 }
11026 }
11027
11028 cam_crop_region_t scalerCropRegion;
11029 bool scalerCropSet = false;
11030 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
11031 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
11032 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
11033 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
11034 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
11035
11036 // Map coordinate system from active array to sensor output.
11037 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
11038 scalerCropRegion.width, scalerCropRegion.height);
11039
11040 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
11041 scalerCropRegion)) {
11042 rc = BAD_VALUE;
11043 }
11044 scalerCropSet = true;
11045 }
11046
11047 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
11048 int64_t sensorExpTime =
11049 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
11050 LOGD("setting sensorExpTime %lld", sensorExpTime);
11051 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
11052 sensorExpTime)) {
11053 rc = BAD_VALUE;
11054 }
11055 }
11056
11057 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
11058 int64_t sensorFrameDuration =
11059 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
11060 int64_t minFrameDuration = getMinFrameDuration(request);
11061 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
11062 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
11063 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
11064 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
11065 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
11066 sensorFrameDuration)) {
11067 rc = BAD_VALUE;
11068 }
11069 }
11070
11071 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
11072 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
11073 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
11074 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
11075 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
11076 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
11077 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
11078 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
11079 sensorSensitivity)) {
11080 rc = BAD_VALUE;
11081 }
11082 }
11083
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011084#ifndef USE_HAL_3_3
11085 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
11086 int32_t ispSensitivity =
11087 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
11088 if (ispSensitivity <
11089 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
11090 ispSensitivity =
11091 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11092 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11093 }
11094 if (ispSensitivity >
11095 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
11096 ispSensitivity =
11097 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
11098 LOGD("clamp ispSensitivity to %d", ispSensitivity);
11099 }
11100 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
11101 ispSensitivity)) {
11102 rc = BAD_VALUE;
11103 }
11104 }
11105#endif
11106
Thierry Strudel3d639192016-09-09 11:52:26 -070011107 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
11108 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
11109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
11110 rc = BAD_VALUE;
11111 }
11112 }
11113
11114 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
11115 uint8_t fwk_facedetectMode =
11116 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
11117
11118 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
11119 fwk_facedetectMode);
11120
11121 if (NAME_NOT_FOUND != val) {
11122 uint8_t facedetectMode = (uint8_t)val;
11123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
11124 facedetectMode)) {
11125 rc = BAD_VALUE;
11126 }
11127 }
11128 }
11129
11130 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
11131 uint8_t histogramMode =
11132 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
11133 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
11134 histogramMode)) {
11135 rc = BAD_VALUE;
11136 }
11137 }
11138
11139 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
11140 uint8_t sharpnessMapMode =
11141 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
11142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
11143 sharpnessMapMode)) {
11144 rc = BAD_VALUE;
11145 }
11146 }
11147
11148 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
11149 uint8_t tonemapMode =
11150 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
11151 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
11152 rc = BAD_VALUE;
11153 }
11154 }
11155 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
11156 /*All tonemap channels will have the same number of points*/
11157 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
11158 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
11159 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
11160 cam_rgb_tonemap_curves tonemapCurves;
11161 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
11162 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
11163 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
11164 tonemapCurves.tonemap_points_cnt,
11165 CAM_MAX_TONEMAP_CURVE_SIZE);
11166 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
11167 }
11168
11169 /* ch0 = G*/
11170 size_t point = 0;
11171 cam_tonemap_curve_t tonemapCurveGreen;
11172 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11173 for (size_t j = 0; j < 2; j++) {
11174 tonemapCurveGreen.tonemap_points[i][j] =
11175 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
11176 point++;
11177 }
11178 }
11179 tonemapCurves.curves[0] = tonemapCurveGreen;
11180
11181 /* ch 1 = B */
11182 point = 0;
11183 cam_tonemap_curve_t tonemapCurveBlue;
11184 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11185 for (size_t j = 0; j < 2; j++) {
11186 tonemapCurveBlue.tonemap_points[i][j] =
11187 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
11188 point++;
11189 }
11190 }
11191 tonemapCurves.curves[1] = tonemapCurveBlue;
11192
11193 /* ch 2 = R */
11194 point = 0;
11195 cam_tonemap_curve_t tonemapCurveRed;
11196 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
11197 for (size_t j = 0; j < 2; j++) {
11198 tonemapCurveRed.tonemap_points[i][j] =
11199 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
11200 point++;
11201 }
11202 }
11203 tonemapCurves.curves[2] = tonemapCurveRed;
11204
11205 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
11206 tonemapCurves)) {
11207 rc = BAD_VALUE;
11208 }
11209 }
11210
11211 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
11212 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
11213 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
11214 captureIntent)) {
11215 rc = BAD_VALUE;
11216 }
11217 }
11218
11219 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
11220 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
11221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
11222 blackLevelLock)) {
11223 rc = BAD_VALUE;
11224 }
11225 }
11226
11227 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
11228 uint8_t lensShadingMapMode =
11229 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
11230 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
11231 lensShadingMapMode)) {
11232 rc = BAD_VALUE;
11233 }
11234 }
11235
11236 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
11237 cam_area_t roi;
11238 bool reset = true;
11239 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
11240
11241 // Map coordinate system from active array to sensor output.
11242 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11243 roi.rect.height);
11244
11245 if (scalerCropSet) {
11246 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11247 }
11248 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
11249 rc = BAD_VALUE;
11250 }
11251 }
11252
11253 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
11254 cam_area_t roi;
11255 bool reset = true;
11256 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
11257
11258 // Map coordinate system from active array to sensor output.
11259 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
11260 roi.rect.height);
11261
11262 if (scalerCropSet) {
11263 reset = resetIfNeededROI(&roi, &scalerCropRegion);
11264 }
11265 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
11266 rc = BAD_VALUE;
11267 }
11268 }
11269
11270 // CDS for non-HFR non-video mode
11271 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
11272 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
11273 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
11274 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
11275 LOGE("Invalid CDS mode %d!", *fwk_cds);
11276 } else {
11277 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11278 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
11279 rc = BAD_VALUE;
11280 }
11281 }
11282 }
11283
Thierry Strudel04e026f2016-10-10 11:27:36 -070011284 // Video HDR
11285 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
11286 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
11287 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
11288 rc = setVideoHdrMode(mParameters, vhdr);
11289 if (rc != NO_ERROR) {
11290 LOGE("setVideoHDR is failed");
11291 }
11292 }
11293
11294 //IR
11295 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
11296 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
11297 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
11298 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
11299 LOGE("Invalid IR mode %d!", fwk_ir);
11300 } else {
11301 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11302 CAM_INTF_META_IR_MODE, fwk_ir)) {
11303 rc = BAD_VALUE;
11304 }
11305 }
11306 }
11307
Thierry Strudel269c81a2016-10-12 12:13:59 -070011308 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
11309 float aec_speed;
11310 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
11311 LOGD("AEC Speed :%f", aec_speed);
11312 if ( aec_speed < 0 ) {
11313 LOGE("Invalid AEC mode %f!", aec_speed);
11314 } else {
11315 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
11316 aec_speed)) {
11317 rc = BAD_VALUE;
11318 }
11319 }
11320 }
11321
11322 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
11323 float awb_speed;
11324 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
11325 LOGD("AWB Speed :%f", awb_speed);
11326 if ( awb_speed < 0 ) {
11327 LOGE("Invalid AWB mode %f!", awb_speed);
11328 } else {
11329 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
11330 awb_speed)) {
11331 rc = BAD_VALUE;
11332 }
11333 }
11334 }
11335
Thierry Strudel3d639192016-09-09 11:52:26 -070011336 // TNR
11337 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
11338 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
11339 uint8_t b_TnrRequested = 0;
11340 cam_denoise_param_t tnr;
11341 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
11342 tnr.process_plates =
11343 (cam_denoise_process_type_t)frame_settings.find(
11344 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
11345 b_TnrRequested = tnr.denoise_enable;
11346 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
11347 rc = BAD_VALUE;
11348 }
11349 }
11350
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011351 if (frame_settings.exists(QCAMERA3_EXPOSURE_METERING_MODE)) {
11352 int32_t* exposure_metering_mode =
11353 frame_settings.find(QCAMERA3_EXPOSURE_METERING_MODE).data.i32;
11354 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
11355 *exposure_metering_mode)) {
11356 rc = BAD_VALUE;
11357 }
11358 }
11359
Thierry Strudel3d639192016-09-09 11:52:26 -070011360 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
11361 int32_t fwk_testPatternMode =
11362 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
11363 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
11364 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
11365
11366 if (NAME_NOT_FOUND != testPatternMode) {
11367 cam_test_pattern_data_t testPatternData;
11368 memset(&testPatternData, 0, sizeof(testPatternData));
11369 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
11370 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
11371 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
11372 int32_t *fwk_testPatternData =
11373 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
11374 testPatternData.r = fwk_testPatternData[0];
11375 testPatternData.b = fwk_testPatternData[3];
11376 switch (gCamCapability[mCameraId]->color_arrangement) {
11377 case CAM_FILTER_ARRANGEMENT_RGGB:
11378 case CAM_FILTER_ARRANGEMENT_GRBG:
11379 testPatternData.gr = fwk_testPatternData[1];
11380 testPatternData.gb = fwk_testPatternData[2];
11381 break;
11382 case CAM_FILTER_ARRANGEMENT_GBRG:
11383 case CAM_FILTER_ARRANGEMENT_BGGR:
11384 testPatternData.gr = fwk_testPatternData[2];
11385 testPatternData.gb = fwk_testPatternData[1];
11386 break;
11387 default:
11388 LOGE("color arrangement %d is not supported",
11389 gCamCapability[mCameraId]->color_arrangement);
11390 break;
11391 }
11392 }
11393 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
11394 testPatternData)) {
11395 rc = BAD_VALUE;
11396 }
11397 } else {
11398 LOGE("Invalid framework sensor test pattern mode %d",
11399 fwk_testPatternMode);
11400 }
11401 }
11402
11403 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
11404 size_t count = 0;
11405 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
11406 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
11407 gps_coords.data.d, gps_coords.count, count);
11408 if (gps_coords.count != count) {
11409 rc = BAD_VALUE;
11410 }
11411 }
11412
11413 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
11414 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
11415 size_t count = 0;
11416 const char *gps_methods_src = (const char *)
11417 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
11418 memset(gps_methods, '\0', sizeof(gps_methods));
11419 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
11420 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
11421 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
11422 if (GPS_PROCESSING_METHOD_SIZE != count) {
11423 rc = BAD_VALUE;
11424 }
11425 }
11426
11427 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
11428 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
11429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
11430 gps_timestamp)) {
11431 rc = BAD_VALUE;
11432 }
11433 }
11434
11435 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
11436 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
11437 cam_rotation_info_t rotation_info;
11438 if (orientation == 0) {
11439 rotation_info.rotation = ROTATE_0;
11440 } else if (orientation == 90) {
11441 rotation_info.rotation = ROTATE_90;
11442 } else if (orientation == 180) {
11443 rotation_info.rotation = ROTATE_180;
11444 } else if (orientation == 270) {
11445 rotation_info.rotation = ROTATE_270;
11446 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070011447 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070011448 rotation_info.streamId = snapshotStreamId;
11449 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
11450 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
11451 rc = BAD_VALUE;
11452 }
11453 }
11454
11455 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
11456 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
11457 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
11458 rc = BAD_VALUE;
11459 }
11460 }
11461
11462 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
11463 uint32_t thumb_quality = (uint32_t)
11464 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
11465 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
11466 thumb_quality)) {
11467 rc = BAD_VALUE;
11468 }
11469 }
11470
11471 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11472 cam_dimension_t dim;
11473 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11474 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11475 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
11476 rc = BAD_VALUE;
11477 }
11478 }
11479
11480 // Internal metadata
11481 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
11482 size_t count = 0;
11483 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
11484 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
11485 privatedata.data.i32, privatedata.count, count);
11486 if (privatedata.count != count) {
11487 rc = BAD_VALUE;
11488 }
11489 }
11490
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011491 // ISO/Exposure Priority
11492 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
11493 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
11494 cam_priority_mode_t mode =
11495 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
11496 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
11497 cam_intf_parm_manual_3a_t use_iso_exp_pty;
11498 use_iso_exp_pty.previewOnly = FALSE;
11499 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
11500 use_iso_exp_pty.value = *ptr;
11501
11502 if(CAM_ISO_PRIORITY == mode) {
11503 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
11504 use_iso_exp_pty)) {
11505 rc = BAD_VALUE;
11506 }
11507 }
11508 else {
11509 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
11510 use_iso_exp_pty)) {
11511 rc = BAD_VALUE;
11512 }
11513 }
11514 }
11515 }
11516
11517 // Saturation
11518 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
11519 int32_t* use_saturation =
11520 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
11521 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
11522 rc = BAD_VALUE;
11523 }
11524 }
11525
Thierry Strudel3d639192016-09-09 11:52:26 -070011526 // EV step
11527 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
11528 gCamCapability[mCameraId]->exp_compensation_step)) {
11529 rc = BAD_VALUE;
11530 }
11531
11532 // CDS info
11533 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
11534 cam_cds_data_t *cdsData = (cam_cds_data_t *)
11535 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
11536
11537 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11538 CAM_INTF_META_CDS_DATA, *cdsData)) {
11539 rc = BAD_VALUE;
11540 }
11541 }
11542
Shuzhen Wang19463d72016-03-08 11:09:52 -080011543 // Hybrid AE
11544 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
11545 uint8_t *hybrid_ae = (uint8_t *)
11546 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
11547
11548 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11549 CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
11550 rc = BAD_VALUE;
11551 }
11552 }
11553
Thierry Strudel3d639192016-09-09 11:52:26 -070011554 return rc;
11555}
11556
11557/*===========================================================================
11558 * FUNCTION : captureResultCb
11559 *
11560 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
11561 *
11562 * PARAMETERS :
11563 * @frame : frame information from mm-camera-interface
11564 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
11565 * @userdata: userdata
11566 *
11567 * RETURN : NONE
11568 *==========================================================================*/
11569void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
11570 camera3_stream_buffer_t *buffer,
11571 uint32_t frame_number, bool isInputBuffer, void *userdata)
11572{
11573 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11574 if (hw == NULL) {
11575 LOGE("Invalid hw %p", hw);
11576 return;
11577 }
11578
11579 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
11580 return;
11581}
11582
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011583/*===========================================================================
11584 * FUNCTION : setBufferErrorStatus
11585 *
11586 * DESCRIPTION: Callback handler for channels to report any buffer errors
11587 *
11588 * PARAMETERS :
11589 * @ch : Channel on which buffer error is reported from
11590 * @frame_number : frame number on which buffer error is reported on
11591 * @buffer_status : buffer error status
11592 * @userdata: userdata
11593 *
11594 * RETURN : NONE
11595 *==========================================================================*/
11596void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
11597 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
11598{
11599 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
11600 if (hw == NULL) {
11601 LOGE("Invalid hw %p", hw);
11602 return;
11603 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011604
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011605 hw->setBufferErrorStatus(ch, frame_number, err);
11606 return;
11607}
11608
/*===========================================================================
 * FUNCTION   : setBufferErrorStatus
 *
 * DESCRIPTION: Mark every pending buffer that belongs to the given channel
 *              on the given frame as errored, so the failure is reported
 *              when the result for that frame is assembled.
 *
 * PARAMETERS :
 *   @ch          : channel on which the buffer error was reported
 *   @frameNumber : frame number the error belongs to
 *   @err         : reported buffer error status (see NOTE below)
 *
 * RETURN     : NONE
 *==========================================================================*/
void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
        uint32_t frameNumber, camera3_buffer_status_t err)
{
    LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
    // mMutex guards mPendingBuffersMap, which is also walked by the request
    // submission and result handling paths.
    pthread_mutex_lock(&mMutex);

    // Locate the pending request(s) with this frame number and flag all of
    // their buffers that were issued on the reporting channel.
    for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if (req.frame_number != frameNumber)
            continue;
        for (auto& k : req.mPendingBufferList) {
            if(k.stream->priv == ch) {
                // NOTE(review): 'err' is only logged above; the stored status
                // is unconditionally CAMERA3_BUFFER_STATUS_ERROR. Confirm no
                // caller passes a non-error status here.
                k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
            }
        }
    }

    pthread_mutex_unlock(&mMutex);
    return;
}
Thierry Strudel3d639192016-09-09 11:52:26 -070011628/*===========================================================================
11629 * FUNCTION : initialize
11630 *
11631 * DESCRIPTION: Pass framework callback pointers to HAL
11632 *
11633 * PARAMETERS :
11634 *
11635 *
11636 * RETURN : Success : 0
11637 * Failure: -ENODEV
11638 *==========================================================================*/
11639
11640int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
11641 const camera3_callback_ops_t *callback_ops)
11642{
11643 LOGD("E");
11644 QCamera3HardwareInterface *hw =
11645 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11646 if (!hw) {
11647 LOGE("NULL camera device");
11648 return -ENODEV;
11649 }
11650
11651 int rc = hw->initialize(callback_ops);
11652 LOGD("X");
11653 return rc;
11654}
11655
11656/*===========================================================================
11657 * FUNCTION : configure_streams
11658 *
11659 * DESCRIPTION:
11660 *
11661 * PARAMETERS :
11662 *
11663 *
11664 * RETURN : Success: 0
11665 * Failure: -EINVAL (if stream configuration is invalid)
11666 * -ENODEV (fatal error)
11667 *==========================================================================*/
11668
11669int QCamera3HardwareInterface::configure_streams(
11670 const struct camera3_device *device,
11671 camera3_stream_configuration_t *stream_list)
11672{
11673 LOGD("E");
11674 QCamera3HardwareInterface *hw =
11675 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11676 if (!hw) {
11677 LOGE("NULL camera device");
11678 return -ENODEV;
11679 }
11680 int rc = hw->configureStreams(stream_list);
11681 LOGD("X");
11682 return rc;
11683}
11684
11685/*===========================================================================
11686 * FUNCTION : construct_default_request_settings
11687 *
11688 * DESCRIPTION: Configure a settings buffer to meet the required use case
11689 *
11690 * PARAMETERS :
11691 *
11692 *
11693 * RETURN : Success: Return valid metadata
11694 * Failure: Return NULL
11695 *==========================================================================*/
11696const camera_metadata_t* QCamera3HardwareInterface::
11697 construct_default_request_settings(const struct camera3_device *device,
11698 int type)
11699{
11700
11701 LOGD("E");
11702 camera_metadata_t* fwk_metadata = NULL;
11703 QCamera3HardwareInterface *hw =
11704 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11705 if (!hw) {
11706 LOGE("NULL camera device");
11707 return NULL;
11708 }
11709
11710 fwk_metadata = hw->translateCapabilityToMetadata(type);
11711
11712 LOGD("X");
11713 return fwk_metadata;
11714}
11715
11716/*===========================================================================
11717 * FUNCTION : process_capture_request
11718 *
11719 * DESCRIPTION:
11720 *
11721 * PARAMETERS :
11722 *
11723 *
11724 * RETURN :
11725 *==========================================================================*/
11726int QCamera3HardwareInterface::process_capture_request(
11727 const struct camera3_device *device,
11728 camera3_capture_request_t *request)
11729{
11730 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011731 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070011732 QCamera3HardwareInterface *hw =
11733 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11734 if (!hw) {
11735 LOGE("NULL camera device");
11736 return -EINVAL;
11737 }
11738
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011739 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070011740 LOGD("X");
11741 return rc;
11742}
11743
11744/*===========================================================================
11745 * FUNCTION : dump
11746 *
11747 * DESCRIPTION:
11748 *
11749 * PARAMETERS :
11750 *
11751 *
11752 * RETURN :
11753 *==========================================================================*/
11754
11755void QCamera3HardwareInterface::dump(
11756 const struct camera3_device *device, int fd)
11757{
11758 /* Log level property is read when "adb shell dumpsys media.camera" is
11759 called so that the log level can be controlled without restarting
11760 the media server */
11761 getLogLevel();
11762
11763 LOGD("E");
11764 QCamera3HardwareInterface *hw =
11765 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11766 if (!hw) {
11767 LOGE("NULL camera device");
11768 return;
11769 }
11770
11771 hw->dump(fd);
11772 LOGD("X");
11773 return;
11774}
11775
11776/*===========================================================================
11777 * FUNCTION : flush
11778 *
11779 * DESCRIPTION:
11780 *
11781 * PARAMETERS :
11782 *
11783 *
11784 * RETURN :
11785 *==========================================================================*/
11786
11787int QCamera3HardwareInterface::flush(
11788 const struct camera3_device *device)
11789{
11790 int rc;
11791 LOGD("E");
11792 QCamera3HardwareInterface *hw =
11793 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
11794 if (!hw) {
11795 LOGE("NULL camera device");
11796 return -EINVAL;
11797 }
11798
11799 pthread_mutex_lock(&hw->mMutex);
11800 // Validate current state
11801 switch (hw->mState) {
11802 case STARTED:
11803 /* valid state */
11804 break;
11805
11806 case ERROR:
11807 pthread_mutex_unlock(&hw->mMutex);
11808 hw->handleCameraDeviceError();
11809 return -ENODEV;
11810
11811 default:
11812 LOGI("Flush returned during state %d", hw->mState);
11813 pthread_mutex_unlock(&hw->mMutex);
11814 return 0;
11815 }
11816 pthread_mutex_unlock(&hw->mMutex);
11817
11818 rc = hw->flush(true /* restart channels */ );
11819 LOGD("X");
11820 return rc;
11821}
11822
11823/*===========================================================================
11824 * FUNCTION : close_camera_device
11825 *
11826 * DESCRIPTION:
11827 *
11828 * PARAMETERS :
11829 *
11830 *
11831 * RETURN :
11832 *==========================================================================*/
11833int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
11834{
11835 int ret = NO_ERROR;
11836 QCamera3HardwareInterface *hw =
11837 reinterpret_cast<QCamera3HardwareInterface *>(
11838 reinterpret_cast<camera3_device_t *>(device)->priv);
11839 if (!hw) {
11840 LOGE("NULL camera device");
11841 return BAD_VALUE;
11842 }
11843
11844 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
11845 delete hw;
11846 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011847 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070011848 return ret;
11849}
11850
11851/*===========================================================================
11852 * FUNCTION : getWaveletDenoiseProcessPlate
11853 *
11854 * DESCRIPTION: query wavelet denoise process plate
11855 *
11856 * PARAMETERS : None
11857 *
11858 * RETURN : WNR prcocess plate value
11859 *==========================================================================*/
11860cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
11861{
11862 char prop[PROPERTY_VALUE_MAX];
11863 memset(prop, 0, sizeof(prop));
11864 property_get("persist.denoise.process.plates", prop, "0");
11865 int processPlate = atoi(prop);
11866 switch(processPlate) {
11867 case 0:
11868 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11869 case 1:
11870 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11871 case 2:
11872 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11873 case 3:
11874 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11875 default:
11876 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11877 }
11878}
11879
11880
11881/*===========================================================================
11882 * FUNCTION : getTemporalDenoiseProcessPlate
11883 *
11884 * DESCRIPTION: query temporal denoise process plate
11885 *
11886 * PARAMETERS : None
11887 *
11888 * RETURN : TNR prcocess plate value
11889 *==========================================================================*/
11890cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
11891{
11892 char prop[PROPERTY_VALUE_MAX];
11893 memset(prop, 0, sizeof(prop));
11894 property_get("persist.tnr.process.plates", prop, "0");
11895 int processPlate = atoi(prop);
11896 switch(processPlate) {
11897 case 0:
11898 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
11899 case 1:
11900 return CAM_WAVELET_DENOISE_CBCR_ONLY;
11901 case 2:
11902 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11903 case 3:
11904 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
11905 default:
11906 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
11907 }
11908}
11909
11910
11911/*===========================================================================
11912 * FUNCTION : extractSceneMode
11913 *
11914 * DESCRIPTION: Extract scene mode from frameworks set metadata
11915 *
11916 * PARAMETERS :
11917 * @frame_settings: CameraMetadata reference
11918 * @metaMode: ANDROID_CONTORL_MODE
11919 * @hal_metadata: hal metadata structure
11920 *
11921 * RETURN : None
11922 *==========================================================================*/
11923int32_t QCamera3HardwareInterface::extractSceneMode(
11924 const CameraMetadata &frame_settings, uint8_t metaMode,
11925 metadata_buffer_t *hal_metadata)
11926{
11927 int32_t rc = NO_ERROR;
11928
11929 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
11930 camera_metadata_ro_entry entry =
11931 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
11932 if (0 == entry.count)
11933 return rc;
11934
11935 uint8_t fwk_sceneMode = entry.data.u8[0];
11936
11937 int val = lookupHalName(SCENE_MODES_MAP,
11938 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
11939 fwk_sceneMode);
11940 if (NAME_NOT_FOUND != val) {
11941 uint8_t sceneMode = (uint8_t)val;
11942 LOGD("sceneMode: %d", sceneMode);
11943 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11944 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11945 rc = BAD_VALUE;
11946 }
11947 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011948
11949 if (fwk_sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
11950 cam_hdr_param_t hdr_params;
11951 hdr_params.hdr_enable = 1;
11952 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11953 hdr_params.hdr_need_1x = false;
11954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11955 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11956 rc = BAD_VALUE;
11957 }
11958 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011959 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
11960 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
11961 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
11962 LOGD("sceneMode: %d", sceneMode);
11963 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11964 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
11965 rc = BAD_VALUE;
11966 }
11967 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011968
11969 if (mForceHdrSnapshot) {
11970 cam_hdr_param_t hdr_params;
11971 hdr_params.hdr_enable = 1;
11972 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
11973 hdr_params.hdr_need_1x = false;
11974 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
11975 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
11976 rc = BAD_VALUE;
11977 }
11978 }
11979
Thierry Strudel3d639192016-09-09 11:52:26 -070011980 return rc;
11981}
11982
11983/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070011984 * FUNCTION : setVideoHdrMode
11985 *
11986 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
11987 *
11988 * PARAMETERS :
11989 * @hal_metadata: hal metadata structure
11990 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
11991 *
11992 * RETURN : None
11993 *==========================================================================*/
11994int32_t QCamera3HardwareInterface::setVideoHdrMode(
11995 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
11996{
11997 int32_t rc = NO_ERROR;
11998 if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
11999 LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
12000 rc = BAD_VALUE;
12001 } else {
12002 cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
12003 if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
12004 LOGD("Setting HDR mode Off");
12005 vhdr_type = CAM_SENSOR_HDR_OFF;
12006 } else {
12007 char video_hdr_prop[PROPERTY_VALUE_MAX];
12008 memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
12009 property_get("persist.camera.hdr.video", video_hdr_prop, "3");
12010 uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
12011 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
12012 CAM_QCOM_FEATURE_SENSOR_HDR) &&
12013 (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
12014 LOGD("Setting HDR mode In Sensor");
12015 vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
12016 }
12017 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
12018 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
12019 (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
12020 LOGD("Setting HDR mode Zigzag");
12021 vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
12022 }
12023 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
12024 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
12025 (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
12026 LOGD("Setting HDR mode Staggered");
12027 vhdr_type = CAM_SENSOR_HDR_STAGGERED;
12028 }
12029 if(vhdr_type == CAM_SENSOR_HDR_MAX) {
12030 LOGD("HDR mode not supported");
12031 rc = BAD_VALUE;
12032 }
12033 }
12034 if(rc == NO_ERROR) {
12035 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12036 CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
12037 rc = BAD_VALUE;
12038 }
12039 }
12040 }
12041 return rc;
12042}
12043
12044/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012045 * FUNCTION : needRotationReprocess
12046 *
12047 * DESCRIPTION: if rotation needs to be done by reprocess in pp
12048 *
12049 * PARAMETERS : none
12050 *
12051 * RETURN : true: needed
12052 * false: no need
12053 *==========================================================================*/
12054bool QCamera3HardwareInterface::needRotationReprocess()
12055{
12056 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
12057 // current rotation is not zero, and pp has the capability to process rotation
12058 LOGH("need do reprocess for rotation");
12059 return true;
12060 }
12061
12062 return false;
12063}
12064
12065/*===========================================================================
12066 * FUNCTION : needReprocess
12067 *
12068 * DESCRIPTION: if reprocess in needed
12069 *
12070 * PARAMETERS : none
12071 *
12072 * RETURN : true: needed
12073 * false: no need
12074 *==========================================================================*/
12075bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
12076{
12077 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
12078 // TODO: add for ZSL HDR later
12079 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
12080 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
12081 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
12082 return true;
12083 } else {
12084 LOGH("already post processed frame");
12085 return false;
12086 }
12087 }
12088 return needRotationReprocess();
12089}
12090
12091/*===========================================================================
12092 * FUNCTION : needJpegExifRotation
12093 *
12094 * DESCRIPTION: if rotation from jpeg is needed
12095 *
12096 * PARAMETERS : none
12097 *
12098 * RETURN : true: needed
12099 * false: no need
12100 *==========================================================================*/
12101bool QCamera3HardwareInterface::needJpegExifRotation()
12102{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012103 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070012104 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12105 LOGD("Need use Jpeg EXIF Rotation");
12106 return true;
12107 }
12108 return false;
12109}
12110
12111/*===========================================================================
12112 * FUNCTION : addOfflineReprocChannel
12113 *
12114 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
12115 * coming from input channel
12116 *
12117 * PARAMETERS :
12118 * @config : reprocess configuration
12119 * @inputChHandle : pointer to the input (source) channel
12120 *
12121 *
12122 * RETURN : Ptr to the newly created channel obj. NULL if failed.
12123 *==========================================================================*/
12124QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
12125 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
12126{
12127 int32_t rc = NO_ERROR;
12128 QCamera3ReprocessChannel *pChannel = NULL;
12129
12130 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012131 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
12132 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070012133 if (NULL == pChannel) {
12134 LOGE("no mem for reprocess channel");
12135 return NULL;
12136 }
12137
12138 rc = pChannel->initialize(IS_TYPE_NONE);
12139 if (rc != NO_ERROR) {
12140 LOGE("init reprocess channel failed, ret = %d", rc);
12141 delete pChannel;
12142 return NULL;
12143 }
12144
12145 // pp feature config
12146 cam_pp_feature_config_t pp_config;
12147 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
12148
12149 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
12150 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
12151 & CAM_QCOM_FEATURE_DSDN) {
12152 //Use CPP CDS incase h/w supports it.
12153 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
12154 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
12155 }
12156 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
12157 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
12158 }
12159
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012160 if (config.hdr_param.hdr_enable) {
12161 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12162 pp_config.hdr_param = config.hdr_param;
12163 }
12164
12165 if (mForceHdrSnapshot) {
12166 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
12167 pp_config.hdr_param.hdr_enable = 1;
12168 pp_config.hdr_param.hdr_need_1x = 0;
12169 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
12170 }
12171
Thierry Strudel3d639192016-09-09 11:52:26 -070012172 rc = pChannel->addReprocStreamsFromSource(pp_config,
12173 config,
12174 IS_TYPE_NONE,
12175 mMetadataChannel);
12176
12177 if (rc != NO_ERROR) {
12178 delete pChannel;
12179 return NULL;
12180 }
12181 return pChannel;
12182}
12183
12184/*===========================================================================
12185 * FUNCTION : getMobicatMask
12186 *
12187 * DESCRIPTION: returns mobicat mask
12188 *
12189 * PARAMETERS : none
12190 *
12191 * RETURN : mobicat mask
12192 *
12193 *==========================================================================*/
12194uint8_t QCamera3HardwareInterface::getMobicatMask()
12195{
12196 return m_MobicatMask;
12197}
12198
12199/*===========================================================================
12200 * FUNCTION : setMobicat
12201 *
12202 * DESCRIPTION: set Mobicat on/off.
12203 *
12204 * PARAMETERS :
12205 * @params : none
12206 *
12207 * RETURN : int32_t type of status
12208 * NO_ERROR -- success
12209 * none-zero failure code
12210 *==========================================================================*/
12211int32_t QCamera3HardwareInterface::setMobicat()
12212{
12213 char value [PROPERTY_VALUE_MAX];
12214 property_get("persist.camera.mobicat", value, "0");
12215 int32_t ret = NO_ERROR;
12216 uint8_t enableMobi = (uint8_t)atoi(value);
12217
12218 if (enableMobi) {
12219 tune_cmd_t tune_cmd;
12220 tune_cmd.type = SET_RELOAD_CHROMATIX;
12221 tune_cmd.module = MODULE_ALL;
12222 tune_cmd.value = TRUE;
12223 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12224 CAM_INTF_PARM_SET_VFE_COMMAND,
12225 tune_cmd);
12226
12227 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12228 CAM_INTF_PARM_SET_PP_COMMAND,
12229 tune_cmd);
12230 }
12231 m_MobicatMask = enableMobi;
12232
12233 return ret;
12234}
12235
12236/*===========================================================================
12237* FUNCTION : getLogLevel
12238*
12239* DESCRIPTION: Reads the log level property into a variable
12240*
12241* PARAMETERS :
12242* None
12243*
12244* RETURN :
12245* None
12246*==========================================================================*/
12247void QCamera3HardwareInterface::getLogLevel()
12248{
12249 char prop[PROPERTY_VALUE_MAX];
12250 uint32_t globalLogLevel = 0;
12251
12252 property_get("persist.camera.hal.debug", prop, "0");
12253 int val = atoi(prop);
12254 if (0 <= val) {
12255 gCamHal3LogLevel = (uint32_t)val;
12256 }
12257
Thierry Strudel9ec39c62016-12-28 11:30:05 -080012258 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070012259 gKpiDebugLevel = atoi(prop);
12260
12261 property_get("persist.camera.global.debug", prop, "0");
12262 val = atoi(prop);
12263 if (0 <= val) {
12264 globalLogLevel = (uint32_t)val;
12265 }
12266
12267 /* Highest log level among hal.logs and global.logs is selected */
12268 if (gCamHal3LogLevel < globalLogLevel)
12269 gCamHal3LogLevel = globalLogLevel;
12270
12271 return;
12272}
12273
12274/*===========================================================================
12275 * FUNCTION : validateStreamRotations
12276 *
12277 * DESCRIPTION: Check if the rotations requested are supported
12278 *
12279 * PARAMETERS :
12280 * @stream_list : streams to be configured
12281 *
12282 * RETURN : NO_ERROR on success
12283 * -EINVAL on failure
12284 *
12285 *==========================================================================*/
12286int QCamera3HardwareInterface::validateStreamRotations(
12287 camera3_stream_configuration_t *streamList)
12288{
12289 int rc = NO_ERROR;
12290
12291 /*
12292 * Loop through all streams requested in configuration
12293 * Check if unsupported rotations have been requested on any of them
12294 */
12295 for (size_t j = 0; j < streamList->num_streams; j++){
12296 camera3_stream_t *newStream = streamList->streams[j];
12297
12298 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
12299 bool isImplDef = (newStream->format ==
12300 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
12301 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
12302 isImplDef);
12303
12304 if (isRotated && (!isImplDef || isZsl)) {
12305 LOGE("Error: Unsupported rotation of %d requested for stream"
12306 "type:%d and stream format:%d",
12307 newStream->rotation, newStream->stream_type,
12308 newStream->format);
12309 rc = -EINVAL;
12310 break;
12311 }
12312 }
12313
12314 return rc;
12315}
12316
12317/*===========================================================================
12318* FUNCTION : getFlashInfo
12319*
12320* DESCRIPTION: Retrieve information about whether the device has a flash.
12321*
12322* PARAMETERS :
12323* @cameraId : Camera id to query
12324* @hasFlash : Boolean indicating whether there is a flash device
12325* associated with given camera
12326* @flashNode : If a flash device exists, this will be its device node.
12327*
12328* RETURN :
12329* None
12330*==========================================================================*/
12331void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
12332 bool& hasFlash,
12333 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
12334{
12335 cam_capability_t* camCapability = gCamCapability[cameraId];
12336 if (NULL == camCapability) {
12337 hasFlash = false;
12338 flashNode[0] = '\0';
12339 } else {
12340 hasFlash = camCapability->flash_available;
12341 strlcpy(flashNode,
12342 (char*)camCapability->flash_dev_name,
12343 QCAMERA_MAX_FILEPATH_LENGTH);
12344 }
12345}
12346
12347/*===========================================================================
12348* FUNCTION : getEepromVersionInfo
12349*
12350* DESCRIPTION: Retrieve version info of the sensor EEPROM data
12351*
12352* PARAMETERS : None
12353*
12354* RETURN : string describing EEPROM version
12355* "\0" if no such info available
12356*==========================================================================*/
12357const char *QCamera3HardwareInterface::getEepromVersionInfo()
12358{
12359 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
12360}
12361
12362/*===========================================================================
12363* FUNCTION : getLdafCalib
12364*
12365* DESCRIPTION: Retrieve Laser AF calibration data
12366*
12367* PARAMETERS : None
12368*
12369* RETURN : Two uint32_t describing laser AF calibration data
12370* NULL if none is available.
12371*==========================================================================*/
12372const uint32_t *QCamera3HardwareInterface::getLdafCalib()
12373{
12374 if (mLdafCalibExist) {
12375 return &mLdafCalib[0];
12376 } else {
12377 return NULL;
12378 }
12379}
12380
12381/*===========================================================================
12382 * FUNCTION : dynamicUpdateMetaStreamInfo
12383 *
12384 * DESCRIPTION: This function:
12385 * (1) stops all the channels
12386 * (2) returns error on pending requests and buffers
12387 * (3) sends metastream_info in setparams
12388 * (4) starts all channels
12389 * This is useful when sensor has to be restarted to apply any
12390 * settings such as frame rate from a different sensor mode
12391 *
12392 * PARAMETERS : None
12393 *
12394 * RETURN : NO_ERROR on success
12395 * Error codes on failure
12396 *
12397 *==========================================================================*/
12398int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
12399{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012400 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070012401 int rc = NO_ERROR;
12402
12403 LOGD("E");
12404
12405 rc = stopAllChannels();
12406 if (rc < 0) {
12407 LOGE("stopAllChannels failed");
12408 return rc;
12409 }
12410
12411 rc = notifyErrorForPendingRequests();
12412 if (rc < 0) {
12413 LOGE("notifyErrorForPendingRequests failed");
12414 return rc;
12415 }
12416
12417 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
12418 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
12419 "Format:%d",
12420 mStreamConfigInfo.type[i],
12421 mStreamConfigInfo.stream_sizes[i].width,
12422 mStreamConfigInfo.stream_sizes[i].height,
12423 mStreamConfigInfo.postprocess_mask[i],
12424 mStreamConfigInfo.format[i]);
12425 }
12426
12427 /* Send meta stream info once again so that ISP can start */
12428 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
12429 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
12430 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
12431 mParameters);
12432 if (rc < 0) {
12433 LOGE("set Metastreaminfo failed. Sensor mode does not change");
12434 }
12435
12436 rc = startAllChannels();
12437 if (rc < 0) {
12438 LOGE("startAllChannels failed");
12439 return rc;
12440 }
12441
12442 LOGD("X");
12443 return rc;
12444}
12445
12446/*===========================================================================
12447 * FUNCTION : stopAllChannels
12448 *
12449 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
12450 *
12451 * PARAMETERS : None
12452 *
12453 * RETURN : NO_ERROR on success
12454 * Error codes on failure
12455 *
12456 *==========================================================================*/
12457int32_t QCamera3HardwareInterface::stopAllChannels()
12458{
12459 int32_t rc = NO_ERROR;
12460
12461 LOGD("Stopping all channels");
12462 // Stop the Streams/Channels
12463 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12464 it != mStreamInfo.end(); it++) {
12465 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12466 if (channel) {
12467 channel->stop();
12468 }
12469 (*it)->status = INVALID;
12470 }
12471
12472 if (mSupportChannel) {
12473 mSupportChannel->stop();
12474 }
12475 if (mAnalysisChannel) {
12476 mAnalysisChannel->stop();
12477 }
12478 if (mRawDumpChannel) {
12479 mRawDumpChannel->stop();
12480 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012481 if (mHdrPlusRawSrcChannel) {
12482 mHdrPlusRawSrcChannel->stop();
12483 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012484 if (mMetadataChannel) {
12485 /* If content of mStreamInfo is not 0, there is metadata stream */
12486 mMetadataChannel->stop();
12487 }
12488
12489 LOGD("All channels stopped");
12490 return rc;
12491}
12492
12493/*===========================================================================
12494 * FUNCTION : startAllChannels
12495 *
12496 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
12497 *
12498 * PARAMETERS : None
12499 *
12500 * RETURN : NO_ERROR on success
12501 * Error codes on failure
12502 *
12503 *==========================================================================*/
12504int32_t QCamera3HardwareInterface::startAllChannels()
12505{
12506 int32_t rc = NO_ERROR;
12507
12508 LOGD("Start all channels ");
12509 // Start the Streams/Channels
12510 if (mMetadataChannel) {
12511 /* If content of mStreamInfo is not 0, there is metadata stream */
12512 rc = mMetadataChannel->start();
12513 if (rc < 0) {
12514 LOGE("META channel start failed");
12515 return rc;
12516 }
12517 }
12518 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12519 it != mStreamInfo.end(); it++) {
12520 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12521 if (channel) {
12522 rc = channel->start();
12523 if (rc < 0) {
12524 LOGE("channel start failed");
12525 return rc;
12526 }
12527 }
12528 }
12529 if (mAnalysisChannel) {
12530 mAnalysisChannel->start();
12531 }
12532 if (mSupportChannel) {
12533 rc = mSupportChannel->start();
12534 if (rc < 0) {
12535 LOGE("Support channel start failed");
12536 return rc;
12537 }
12538 }
12539 if (mRawDumpChannel) {
12540 rc = mRawDumpChannel->start();
12541 if (rc < 0) {
12542 LOGE("RAW dump channel start failed");
12543 return rc;
12544 }
12545 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012546 if (mHdrPlusRawSrcChannel) {
12547 rc = mHdrPlusRawSrcChannel->start();
12548 if (rc < 0) {
12549 LOGE("HDR+ RAW channel start failed");
12550 return rc;
12551 }
12552 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012553
12554 LOGD("All channels started");
12555 return rc;
12556}
12557
12558/*===========================================================================
12559 * FUNCTION : notifyErrorForPendingRequests
12560 *
12561 * DESCRIPTION: This function sends error for all the pending requests/buffers
12562 *
12563 * PARAMETERS : None
12564 *
12565 * RETURN : Error codes
12566 * NO_ERROR on success
12567 *
12568 *==========================================================================*/
int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
{
    int32_t rc = NO_ERROR;
    unsigned int frameNum = 0;
    camera3_capture_result_t result;
    camera3_stream_buffer_t *pStream_Buf = NULL;

    memset(&result, 0, sizeof(camera3_capture_result_t));

    // Find the oldest pending request's frame number. Buffers belonging to
    // frames older than this have already had their metadata delivered, so
    // they only need ERROR_BUFFER; newer ones need a full ERROR_REQUEST.
    if (mPendingRequestsList.size() > 0) {
        pendingRequestIterator i = mPendingRequestsList.begin();
        frameNum = i->frame_number;
    } else {
        /* There might still be pending buffers even though there are
         no pending requests. Setting the frameNum to MAX so that
         all the buffers with smaller frame numbers are returned */
        frameNum = UINT_MAX;
    }

    LOGH("Oldest frame num on mPendingRequestsList = %u",
      frameNum);

    // Walk every request that still has outstanding buffers. Both branches
    // erase the request from the map, which advances `req`; hence no `req++`
    // in the for-statement.
    for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {

        if (req->frame_number < frameNum) {
            // Send Error notify to frameworks for each buffer for which
            // metadata buffer is already sent
            LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
                req->frame_number, req->mPendingBufferList.size());

            // NOTE(review): operator new throws rather than returning NULL in
            // standard C++, so this NULL check is likely dead — confirm
            // whether this file is built with -fno-exceptions.
            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0,
                sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
            // Metadata was already sent for these frames: result carries only
            // the buffers, each flagged CAMERA3_BUFFER_STATUS_ERROR.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {

                // One ERROR_BUFFER notify per outstanding buffer.
                camera3_notify_msg_t notify_msg;
                memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
                notify_msg.type = CAMERA3_MSG_ERROR;
                notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
                notify_msg.message.error.error_stream = info->stream;
                notify_msg.message.error.frame_number = req->frame_number;
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                orchestrateNotify(&notify_msg);
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);

            // Safe to free: orchestrateResult delivers synchronously, so the
            // framework no longer references the array afterwards.
            delete [] pStream_Buf;
        } else {

            // Go through the pending requests info and send error request to framework
            // NOTE(review): `i` is begin() of mPendingRequestsList; if that
            // list is empty this branch would dereference end() below when
            // reading i->input_buffer — confirm this branch is unreachable
            // with an empty request list (frameNum == UINT_MAX).
            pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning

            LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);

            // Send error notify to frameworks
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
            notify_msg.message.error.error_stream = NULL;
            notify_msg.message.error.frame_number = req->frame_number;
            orchestrateNotify(&notify_msg);

            pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
            if (NULL == pStream_Buf) {
                LOGE("No memory for pending buffers array");
                return NO_MEMORY;
            }
            memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());

            // ERROR_REQUEST: no metadata will follow, so the input buffer (if
            // any) is returned together with all error-status output buffers.
            result.result = NULL;
            result.frame_number = req->frame_number;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = req->mPendingBufferList.size();
            result.output_buffers = pStream_Buf;

            size_t index = 0;
            for (auto info = req->mPendingBufferList.begin();
                info != req->mPendingBufferList.end(); ) {
                pStream_Buf[index].acquire_fence = -1;
                pStream_Buf[index].release_fence = -1;
                pStream_Buf[index].buffer = info->buffer;
                pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
                pStream_Buf[index].stream = info->stream;
                index++;
                // Remove buffer from list
                info = req->mPendingBufferList.erase(info);
            }

            // Remove this request from Map
            LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
                    req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
            req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);

            orchestrateResult(&result);
            delete [] pStream_Buf;
            // Drop the corresponding entry from the pending-requests list too.
            i = erasePendingRequest(i);
        }
    }

    /* Reset pending frame Drop list and requests list */
    mPendingFrameDropList.clear();

    // Defensive clear: the loop above should already have emptied these.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    LOGH("Cleared all the pending buffers ");

    return rc;
}
12705
12706bool QCamera3HardwareInterface::isOnEncoder(
12707 const cam_dimension_t max_viewfinder_size,
12708 uint32_t width, uint32_t height)
12709{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012710 return ((width > (uint32_t)max_viewfinder_size.width) ||
12711 (height > (uint32_t)max_viewfinder_size.height) ||
12712 (width > (uint32_t)VIDEO_4K_WIDTH) ||
12713 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070012714}
12715
12716/*===========================================================================
12717 * FUNCTION : setBundleInfo
12718 *
12719 * DESCRIPTION: Set bundle info for all streams that are bundle.
12720 *
12721 * PARAMETERS : None
12722 *
12723 * RETURN : NO_ERROR on success
12724 * Error codes on failure
12725 *==========================================================================*/
12726int32_t QCamera3HardwareInterface::setBundleInfo()
12727{
12728 int32_t rc = NO_ERROR;
12729
12730 if (mChannelHandle) {
12731 cam_bundle_config_t bundleInfo;
12732 memset(&bundleInfo, 0, sizeof(bundleInfo));
12733 rc = mCameraHandle->ops->get_bundle_info(
12734 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
12735 if (rc != NO_ERROR) {
12736 LOGE("get_bundle_info failed");
12737 return rc;
12738 }
12739 if (mAnalysisChannel) {
12740 mAnalysisChannel->setBundleInfo(bundleInfo);
12741 }
12742 if (mSupportChannel) {
12743 mSupportChannel->setBundleInfo(bundleInfo);
12744 }
12745 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
12746 it != mStreamInfo.end(); it++) {
12747 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
12748 channel->setBundleInfo(bundleInfo);
12749 }
12750 if (mRawDumpChannel) {
12751 mRawDumpChannel->setBundleInfo(bundleInfo);
12752 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070012753 if (mHdrPlusRawSrcChannel) {
12754 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
12755 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012756 }
12757
12758 return rc;
12759}
12760
12761/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012762 * FUNCTION : setInstantAEC
12763 *
12764 * DESCRIPTION: Set Instant AEC related params.
12765 *
12766 * PARAMETERS :
12767 * @meta: CameraMetadata reference
12768 *
12769 * RETURN : NO_ERROR on success
12770 * Error codes on failure
12771 *==========================================================================*/
12772int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
12773{
12774 int32_t rc = NO_ERROR;
12775 uint8_t val = 0;
12776 char prop[PROPERTY_VALUE_MAX];
12777
12778 // First try to configure instant AEC from framework metadata
12779 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
12780 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
12781 }
12782
12783 // If framework did not set this value, try to read from set prop.
12784 if (val == 0) {
12785 memset(prop, 0, sizeof(prop));
12786 property_get("persist.camera.instant.aec", prop, "0");
12787 val = (uint8_t)atoi(prop);
12788 }
12789
12790 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
12791 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
12792 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
12793 mInstantAEC = val;
12794 mInstantAECSettledFrameNumber = 0;
12795 mInstantAecFrameIdxCount = 0;
12796 LOGH("instantAEC value set %d",val);
12797 if (mInstantAEC) {
12798 memset(prop, 0, sizeof(prop));
12799 property_get("persist.camera.ae.instant.bound", prop, "10");
12800 int32_t aec_frame_skip_cnt = atoi(prop);
12801 if (aec_frame_skip_cnt >= 0) {
12802 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
12803 } else {
12804 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
12805 rc = BAD_VALUE;
12806 }
12807 }
12808 } else {
12809 LOGE("Bad instant aec value set %d", val);
12810 rc = BAD_VALUE;
12811 }
12812 return rc;
12813}
12814
12815/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012816 * FUNCTION : get_num_overall_buffers
12817 *
12818 * DESCRIPTION: Estimate number of pending buffers across all requests.
12819 *
12820 * PARAMETERS : None
12821 *
12822 * RETURN : Number of overall pending buffers
12823 *
12824 *==========================================================================*/
12825uint32_t PendingBuffersMap::get_num_overall_buffers()
12826{
12827 uint32_t sum_buffers = 0;
12828 for (auto &req : mPendingBuffersInRequest) {
12829 sum_buffers += req.mPendingBufferList.size();
12830 }
12831 return sum_buffers;
12832}
12833
12834/*===========================================================================
12835 * FUNCTION : removeBuf
12836 *
12837 * DESCRIPTION: Remove a matching buffer from tracker.
12838 *
12839 * PARAMETERS : @buffer: image buffer for the callback
12840 *
12841 * RETURN : None
12842 *
12843 *==========================================================================*/
void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
{
    bool buffer_found = false;
    // Search every pending request for the matching buffer handle. Erasure
    // happens mid-iteration, so both loops break out immediately after a hit
    // rather than continuing with potentially-invalidated iterators.
    for (auto req = mPendingBuffersInRequest.begin();
            req != mPendingBuffersInRequest.end(); req++) {
        for (auto k = req->mPendingBufferList.begin();
                k != req->mPendingBufferList.end(); k++ ) {
            if (k->buffer == buffer) {
                LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
                        req->frame_number, buffer);
                k = req->mPendingBufferList.erase(k);
                if (req->mPendingBufferList.empty()) {
                    // Remove this request from Map
                    req = mPendingBuffersInRequest.erase(req);
                }
                buffer_found = true;
                break;
            }
        }
        // Stop the outer scan once the buffer has been removed; a handle is
        // expected to appear in at most one request.
        if (buffer_found) {
            break;
        }
    }
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
            get_num_overall_buffers());
}
12870
12871/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080012872 * FUNCTION : getBufErrStatus
12873 *
12874 * DESCRIPTION: get buffer error status
12875 *
12876 * PARAMETERS : @buffer: buffer handle
12877 *
12878 * RETURN : Error status
12879 *
12880 *==========================================================================*/
12881int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
12882{
12883 for (auto& req : mPendingBuffersInRequest) {
12884 for (auto& k : req.mPendingBufferList) {
12885 if (k.buffer == buffer)
12886 return k.bufStatus;
12887 }
12888 }
12889 return CAMERA3_BUFFER_STATUS_OK;
12890}
12891
12892/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070012893 * FUNCTION : setPAAFSupport
12894 *
12895 * DESCRIPTION: Set the preview-assisted auto focus support bit in
12896 * feature mask according to stream type and filter
12897 * arrangement
12898 *
12899 * PARAMETERS : @feature_mask: current feature mask, which may be modified
12900 * @stream_type: stream type
12901 * @filter_arrangement: filter arrangement
12902 *
12903 * RETURN : None
12904 *==========================================================================*/
12905void QCamera3HardwareInterface::setPAAFSupport(
12906 cam_feature_mask_t& feature_mask,
12907 cam_stream_type_t stream_type,
12908 cam_color_filter_arrangement_t filter_arrangement)
12909{
12910 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
12911 feature_mask, stream_type, filter_arrangement);
12912
12913 switch (filter_arrangement) {
12914 case CAM_FILTER_ARRANGEMENT_RGGB:
12915 case CAM_FILTER_ARRANGEMENT_GRBG:
12916 case CAM_FILTER_ARRANGEMENT_GBRG:
12917 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080012918 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
12919 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070012920 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
12921 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12922 }
12923 break;
12924 case CAM_FILTER_ARRANGEMENT_Y:
12925 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
12926 feature_mask |= CAM_QCOM_FEATURE_PAAF;
12927 }
12928 break;
12929 default:
12930 break;
12931 }
12932}
12933
12934/*===========================================================================
12935* FUNCTION : getSensorMountAngle
12936*
12937* DESCRIPTION: Retrieve sensor mount angle
12938*
12939* PARAMETERS : None
12940*
12941* RETURN : sensor mount angle in uint32_t
12942*==========================================================================*/
uint32_t QCamera3HardwareInterface::getSensorMountAngle()
{
    // Read straight from the static per-camera capability table.
    return gCamCapability[mCameraId]->sensor_mount_angle;
}
12947
12948/*===========================================================================
12949* FUNCTION : getRelatedCalibrationData
12950*
12951* DESCRIPTION: Retrieve related system calibration data
12952*
12953* PARAMETERS : None
12954*
12955* RETURN : Pointer of related system calibration data
12956*==========================================================================*/
const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
{
    // Pointer into the static per-camera capability table; the caller must
    // not free it and should treat it as read-only.
    return (const cam_related_system_calibration_data_t *)
            &(gCamCapability[mCameraId]->related_cam_calibration);
}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012962
12963/*===========================================================================
12964 * FUNCTION : is60HzZone
12965 *
12966 * DESCRIPTION: Whether the phone is in zone with 60hz electricity frequency
12967 *
12968 * PARAMETERS : None
12969 *
12970 * RETURN : True if in 60Hz zone, False otherwise
12971 *==========================================================================*/
12972bool QCamera3HardwareInterface::is60HzZone()
12973{
12974 time_t t = time(NULL);
12975 struct tm lt;
12976
12977 struct tm* r = localtime_r(&t, &lt);
12978
12979 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
12980 return true;
12981 else
12982 return false;
12983}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070012984
12985/*===========================================================================
12986 * FUNCTION : adjustBlackLevelForCFA
12987 *
12988 * DESCRIPTION: Adjust the black level pattern in the order of RGGB to the order
12989 * of bayer CFA (Color Filter Array).
12990 *
12991 * PARAMETERS : @input: black level pattern in the order of RGGB
12992 * @output: black level pattern in the order of CFA
12993 * @color_arrangement: CFA color arrangement
12994 *
12995 * RETURN : None
12996 *==========================================================================*/
12997template<typename T>
12998void QCamera3HardwareInterface::adjustBlackLevelForCFA(
12999 T input[BLACK_LEVEL_PATTERN_CNT],
13000 T output[BLACK_LEVEL_PATTERN_CNT],
13001 cam_color_filter_arrangement_t color_arrangement)
13002{
13003 switch (color_arrangement) {
13004 case CAM_FILTER_ARRANGEMENT_GRBG:
13005 output[0] = input[1];
13006 output[1] = input[0];
13007 output[2] = input[3];
13008 output[3] = input[2];
13009 break;
13010 case CAM_FILTER_ARRANGEMENT_GBRG:
13011 output[0] = input[2];
13012 output[1] = input[3];
13013 output[2] = input[0];
13014 output[3] = input[1];
13015 break;
13016 case CAM_FILTER_ARRANGEMENT_BGGR:
13017 output[0] = input[3];
13018 output[1] = input[2];
13019 output[2] = input[1];
13020 output[3] = input[0];
13021 break;
13022 case CAM_FILTER_ARRANGEMENT_RGGB:
13023 output[0] = input[0];
13024 output[1] = input[1];
13025 output[2] = input[2];
13026 output[3] = input[3];
13027 break;
13028 default:
13029 LOGE("Invalid color arrangement to derive dynamic blacklevel");
13030 break;
13031 }
13032}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013033
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013034void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
13035 const camera_metadata_t &resultMetadata) {
13036 if (result != nullptr) {
13037 if (result->outputBuffers.size() != 1) {
13038 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
13039 result->outputBuffers.size());
13040 return;
13041 }
13042
13043 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
13044 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
13045 result->outputBuffers[0].streamId);
13046 return;
13047 }
13048
13049 // Send HDR+ metadata to framework.
13050 {
13051 pthread_mutex_lock(&mMutex);
13052 handlePendingResultsWithLock(result->requestId, clone_camera_metadata(&resultMetadata));
13053 pthread_mutex_unlock(&mMutex);
13054 }
13055
13056 HdrPlusPendingRequest pendingRequest;
13057 {
13058 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13059 auto req = mHdrPlusPendingRequests.find(result->requestId);
13060 pendingRequest = req->second;
13061 }
13062
13063 // Check if dumping HDR+ YUV output is enabled.
13064 char prop[PROPERTY_VALUE_MAX];
13065 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
13066 bool dumpYuvOutput = atoi(prop);
13067
13068 if (dumpYuvOutput) {
13069 QCamera3PicChannel *picChannel =
13070 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
13071
13072 // Dump yuv buffer to a ppm file.
13073 pbcamera::StreamConfiguration outputConfig;
13074 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
13075 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
13076 if (rc == OK) {
13077 char buf[FILENAME_MAX] = {};
13078 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
13079 result->requestId, result->outputBuffers[0].streamId,
13080 outputConfig.image.width, outputConfig.image.height);
13081
13082 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
13083 } else {
13084 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
13085 __FUNCTION__, strerror(-rc), rc);
13086 }
13087 }
13088
13089 // Return the buffer to pic channel.
13090 // TODO: Use result metadata.
13091 mPictureChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
13092 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
13093 pendingRequest.settings);
13094
13095 // Remove the HDR+ pending request.
13096 {
13097 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13098 auto req = mHdrPlusPendingRequests.find(result->requestId);
13099 mHdrPlusPendingRequests.erase(req);
13100 }
13101 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013102}
13103
Chien-Yu Chene687bd02016-12-07 18:30:26 -080013104void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult) {
13105 // TODO: Handle HDR+ capture failures and send the failure to framework.
13106 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
13107 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
13108
13109 // Return the buffer to pic channel.
13110 QCamera3PicChannel *picChannel =
13111 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
13112 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
13113
13114 mHdrPlusPendingRequests.erase(pendingRequest);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070013115}
13116
Thierry Strudel3d639192016-09-09 11:52:26 -070013117}; //end namespace qcamera