blob: ff08c92308efc2e03b5e2f06d95ad8ce20102e08 [file] [log] [blame]
Thierry Strudel3d639192016-09-09 11:52:26 -07001/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
2*
3* Redistribution and use in source and binary forms, with or without
4* modification, are permitted provided that the following conditions are
5* met:
6* * Redistributions of source code must retain the above copyright
7* notice, this list of conditions and the following disclaimer.
8* * Redistributions in binary form must reproduce the above
9* copyright notice, this list of conditions and the following
10* disclaimer in the documentation and/or other materials provided
11* with the distribution.
12* * Neither the name of The Linux Foundation nor the names of its
13* contributors may be used to endorse or promote products derived
14* from this software without specific prior written permission.
15*
16* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
17* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
18* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
19* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
20* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
21* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
22* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
23* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
24* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
25* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
26* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
27*
28*/
29
30#define LOG_TAG "QCamera3HWI"
31//#define LOG_NDEBUG 0
32
33#define __STDC_LIMIT_MACROS
34
35// To remove
36#include <cutils/properties.h>
37
38// System dependencies
39#include <dlfcn.h>
40#include <fcntl.h>
41#include <stdio.h>
42#include <stdlib.h>
43#include "utils/Timers.h"
44#include "sys/ioctl.h"
45#include <sync/sync.h>
46#include "gralloc_priv.h"
47
48// Display dependencies
49#include "qdMetaData.h"
50
51// Camera dependencies
52#include "android/QCamera3External.h"
53#include "util/QCameraFlash.h"
54#include "QCamera3HWI.h"
55#include "QCamera3VendorTags.h"
56#include "QCameraTrace.h"
57
58extern "C" {
59#include "mm_camera_dbg.h"
60}
61
62using namespace android;
63
64namespace qcamera {
65
// Convenience accessor for the INDEX-th buffer pointer inside a memory object.
#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

// Number of requests the empty pipeline needs before producing results.
#define EMPTY_PIPELINE_DELAY 2
// Number of partial metadata results delivered per capture request.
#define PARTIAL_RESULT_COUNT 2
#define FRAME_SKIP_DELAY 0

// Maximum sample values for the supported sensor bit depths.
#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

// UHD (4K) video dimensions.
#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

// Largest stream size for which EIS (electronic image stabilization) applies.
#define MAX_EIS_WIDTH 1920
#define MAX_EIS_HEIGHT 1080

// Stream-count limits used during stream configuration validation.
#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
// Number of int32 values per metering region tuple (xmin, ymin, xmax, ymax, weight).
#define REGIONS_TUPLE_COUNT 5
#define HDR_PLUS_PERF_TIME_OUT (7000) // milliseconds
// Set a threshold for detection of missing buffers //seconds
#define MISSING_REQUEST_BUF_TIMEOUT 3
#define FLUSH_TIMEOUT 3
// Element count of a statically sized array (classic sizeof idiom).
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

// Post-processing feature mask superset advertised by this HAL3 implementation.
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

// Sentinel meaning "wait with no timeout".
#define TIMEOUT_NEVER -1

/* Face landmarks indices (into the per-face landmark int32 array) */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6
117
// Per-sensor capability tables, filled in when each camera is probed.
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
// Cached static metadata blobs, one per camera id.
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
// Process-wide lock guarding session bookkeeping (defined elsewhere).
extern pthread_mutex_t gCamLock;
// Runtime-adjustable HAL log verbosity (read by the logging macros).
volatile uint32_t gCamHal3LogLevel = 1;
// Count of currently open camera sessions (defined elsewhere).
extern uint8_t gNumCameraSessions;

// Property-string -> CDS (chroma downsampling) mode lookup table.
const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};

// Vendor video-HDR enum -> backend video-HDR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF,  CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON,   CAM_VIDEO_HDR_MODE_ON }
};


// Vendor IR enum -> backend IR mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON,   CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};
Thierry Strudel3d639192016-09-09 11:52:26 -0700144
// android.control.effectMode -> backend effect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

// android.control.awbMode -> backend white-balance mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

// android.control.sceneMode -> backend scene mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE}
};

// android.control.afMode -> backend focus mode.
// NOTE: AF_MODE_OFF intentionally appears twice (maps to both OFF and FIXED);
// reverse lookup picks the first match, i.e. CAM_FOCUS_MODE_OFF.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};
205
// android.colorCorrection.aberrationMode -> backend CAC mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

// android.control.aeAntibandingMode -> backend antibanding mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

// android.control.aeMode -> backend flash mode implied by that AE mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO}
};

// android.flash.mode -> backend flash mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

// android.statistics.faceDetectMode -> backend face-detect mode.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};
251
// android.lens.info.focusDistanceCalibration -> backend calibration level.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

// android.lens.state -> backend AF lens state.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY,    CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,        CAM_AF_LENS_STATE_MOVING}
};

// Advertised JPEG thumbnail sizes as flat (width, height) pairs;
// (0, 0) means "no thumbnail" per the camera metadata contract.
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

// android.sensor.testPatternMode -> backend test pattern.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};
289
/* Since there is no mapping for all the options some Android enum are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index which means that for HAL values that are map to different
 * Android values, the traverse logic will select the first one found.
 */
// android.sensor.referenceIlluminant1 -> backend AWB illuminant.
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

// Requested FPS -> backend high-frame-rate mode.
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};
327
// HAL3 device ops vtable handed to the camera framework via mCameraDevice.ops.
// register_stream_buffers and get_metadata_vendor_tag_ops are deprecated in
// HAL3.2+ and left NULL.
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// initialise to some default value
// Per-camera backend session ids for dual-cam linking; 0xDEADBEEF marks
// "no active session" (see openCamera()/closeCamera()).
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};
342
/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface. Initializes all
 *              members to safe defaults, fills in the camera3_device_t
 *              vtable, creates the mutex/condvars, reads the persist.camera.*
 *              debug properties and queries the GPU stride alignment.
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *   @callbacks : module callbacks from the framework (stored in mCallbacks)
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mDummyBatchChannel(NULL),
      m_perfLock(),
      mCommon(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_MobicatMask(0),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mLdafCalibExist(false),
      mPowerHintEnabled(false),
      mLastCustIntentFrmNum(-1),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pRelCamSyncHeap(NULL),
      m_pRelCamSyncBuf(NULL)
{
    getLogLevel();
    m_perfLock.lock_init();
    mCommon.init(gCamCapability[cameraId]);
    // Fill in the camera3_device_t the framework will use.
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    pthread_cond_init(&mBuffersCond, NULL);

    pthread_cond_init(&mRequestCond, NULL);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    // No default request templates built yet; constructed lazily on demand.
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    // Temporal noise reduction toggles for preview/video streams.
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "0");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);

    //Load and read GPU library.
    // Query the GPU pixel alignment so buffer strides match gralloc;
    // fall back to CAM_PAD_TO_32 if the library or symbol is absent.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_32;
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
         if (LINK_get_surface_pixel_alignment) {
             mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
         }
         dlclose(lib_surface_utils);
    }
}
470
/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface. Teardown is
 *              order-sensitive: stop every channel first, then delete
 *              channels, then unconfigure/deinit parameters, delete the
 *              backend channel, close the camera, and finally release
 *              bookkeeping and synchronization primitives.
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    /* Turn off current power hint before acquiring perfLock in case they
     * conflict with each other */
    disablePowerHint();

    m_perfLock.lock_acq();

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGD("stopping channel %d", mChannelHandle);
    }

    // All channels are stopped now; safe to delete them.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
        it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    // Only the alias is cleared (no delete) — presumably the picture channel
    // is owned via mStreamInfo and deleted in the loop above; TODO confirm.
    mPictureChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    // Drain all per-request bookkeeping and default request templates.
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    mPendingReprocessResultList.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    m_perfLock.lock_rel();
    m_perfLock.lock_deinit();

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}
604
605/*===========================================================================
606 * FUNCTION : erasePendingRequest
607 *
608 * DESCRIPTION: function to erase a desired pending request after freeing any
609 * allocated memory
610 *
611 * PARAMETERS :
612 * @i : iterator pointing to pending request to be erased
613 *
614 * RETURN : iterator pointing to the next request
615 *==========================================================================*/
616QCamera3HardwareInterface::pendingRequestIterator
617 QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
618{
619 if (i->input_buffer != NULL) {
620 free(i->input_buffer);
621 i->input_buffer = NULL;
622 }
623 if (i->settings != NULL)
624 free_camera_metadata((camera_metadata_t*)i->settings);
625 return mPendingRequestsList.erase(i);
626}
627
628/*===========================================================================
629 * FUNCTION : camEvtHandle
630 *
631 * DESCRIPTION: Function registered to mm-camera-interface to handle events
632 *
633 * PARAMETERS :
634 * @camera_handle : interface layer camera handle
635 * @evt : ptr to event
636 * @user_data : user data ptr
637 *
638 * RETURN : none
639 *==========================================================================*/
640void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
641 mm_camera_event_t *evt,
642 void *user_data)
643{
644 QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
645 if (obj && evt) {
646 switch(evt->server_event_type) {
647 case CAM_EVENT_TYPE_DAEMON_DIED:
648 pthread_mutex_lock(&obj->mMutex);
649 obj->mState = ERROR;
650 pthread_mutex_unlock(&obj->mMutex);
651 LOGE("Fatal, camera daemon died");
652 break;
653
654 case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
655 LOGD("HAL got request pull from Daemon");
656 pthread_mutex_lock(&obj->mMutex);
657 obj->mWokenUpByDaemon = true;
658 obj->unblockRequestIfNecessary();
659 pthread_mutex_unlock(&obj->mMutex);
660 break;
661
662 default:
663 LOGW("Warning: Unhandled event %d",
664 evt->server_event_type);
665 break;
666 }
667 } else {
668 LOGE("NULL user_data/evt");
669 }
670}
671
672/*===========================================================================
673 * FUNCTION : openCamera
674 *
675 * DESCRIPTION: open camera
676 *
677 * PARAMETERS :
678 * @hw_device : double ptr for camera device struct
679 *
680 * RETURN : int32_t type of status
681 * NO_ERROR -- success
682 * none-zero failure code
683 *==========================================================================*/
684int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
685{
686 int rc = 0;
687 if (mState != CLOSED) {
688 *hw_device = NULL;
689 return PERMISSION_DENIED;
690 }
691
692 m_perfLock.lock_acq();
693 LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
694 mCameraId);
695
696 rc = openCamera();
697 if (rc == 0) {
698 *hw_device = &mCameraDevice.common;
699 } else
700 *hw_device = NULL;
701
702 m_perfLock.lock_rel();
703 LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
704 mCameraId, rc);
705
706 if (rc == NO_ERROR) {
707 mState = OPENED;
708 }
709 return rc;
710}
711
712/*===========================================================================
713 * FUNCTION : openCamera
714 *
715 * DESCRIPTION: open camera
716 *
717 * PARAMETERS : none
718 *
719 * RETURN : int32_t type of status
720 * NO_ERROR -- success
721 * none-zero failure code
722 *==========================================================================*/
723int QCamera3HardwareInterface::openCamera()
724{
725 int rc = 0;
726 char value[PROPERTY_VALUE_MAX];
727
728 KPI_ATRACE_CALL();
729 if (mCameraHandle) {
730 LOGE("Failure: Camera already opened");
731 return ALREADY_EXISTS;
732 }
733
734 rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
735 if (rc < 0) {
736 LOGE("Failed to reserve flash for camera id: %d",
737 mCameraId);
738 return UNKNOWN_ERROR;
739 }
740
741 rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
742 if (rc) {
743 LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
744 return rc;
745 }
746
747 if (!mCameraHandle) {
748 LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
749 return -ENODEV;
750 }
751
752 rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
753 camEvtHandle, (void *)this);
754
755 if (rc < 0) {
756 LOGE("Error, failed to register event callback");
757 /* Not closing camera here since it is already handled in destructor */
758 return FAILED_TRANSACTION;
759 }
760
761 mExifParams.debug_params =
762 (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
763 if (mExifParams.debug_params) {
764 memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
765 } else {
766 LOGE("Out of Memory. Allocation failed for 3A debug exif params");
767 return NO_MEMORY;
768 }
769 mFirstConfiguration = true;
770
771 //Notify display HAL that a camera session is active.
772 //But avoid calling the same during bootup because camera service might open/close
773 //cameras at boot time during its initialization and display service will also internally
774 //wait for camera service to initialize first while calling this display API, resulting in a
775 //deadlock situation. Since boot time camera open/close calls are made only to fetch
776 //capabilities, no need of this display bw optimization.
777 //Use "service.bootanim.exit" property to know boot status.
778 property_get("service.bootanim.exit", value, "0");
779 if (atoi(value) == 1) {
780 pthread_mutex_lock(&gCamLock);
781 if (gNumCameraSessions++ == 0) {
782 setCameraLaunchStatus(true);
783 }
784 pthread_mutex_unlock(&gCamLock);
785 }
786
787 //fill the session id needed while linking dual cam
788 pthread_mutex_lock(&gCamLock);
789 rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
790 &sessionId[mCameraId]);
791 pthread_mutex_unlock(&gCamLock);
792
793 if (rc < 0) {
794 LOGE("Error, failed to get sessiion id");
795 return UNKNOWN_ERROR;
796 } else {
797 //Allocate related cam sync buffer
798 //this is needed for the payload that goes along with bundling cmd for related
799 //camera use cases
800 m_pRelCamSyncHeap = new QCamera3HeapMemory(1);
801 rc = m_pRelCamSyncHeap->allocate(sizeof(cam_sync_related_sensors_event_info_t));
802 if(rc != OK) {
803 rc = NO_MEMORY;
804 LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
805 return NO_MEMORY;
806 }
807
808 //Map memory for related cam sync buffer
809 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
810 CAM_MAPPING_BUF_TYPE_SYNC_RELATED_SENSORS_BUF,
811 m_pRelCamSyncHeap->getFd(0),
812 sizeof(cam_sync_related_sensors_event_info_t),
813 m_pRelCamSyncHeap->getPtr(0));
814 if(rc < 0) {
815 LOGE("Dualcam: failed to map Related cam sync buffer");
816 rc = FAILED_TRANSACTION;
817 return NO_MEMORY;
818 }
819 m_pRelCamSyncBuf =
820 (cam_sync_related_sensors_event_info_t*) DATA_PTR(m_pRelCamSyncHeap,0);
821 }
822
823 LOGH("mCameraId=%d",mCameraId);
824
825 return NO_ERROR;
826}
827
/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera. Mirrors openCamera(): closes the backend
 *              handle, invalidates the dual-cam session id, drops the
 *              display-HAL session count (post-boot only), releases the
 *              dual-cam sync heap and 3A debug params, and returns the
 *              flash unit to the torch HAL.
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CALL();
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
             mCameraId);
    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    // Release the dual-cam sync heap allocated in openCamera().
    if (NULL != m_pRelCamSyncHeap) {
        m_pRelCamSyncHeap->deallocate();
        delete m_pRelCamSyncHeap;
        m_pRelCamSyncHeap = NULL;
        m_pRelCamSyncBuf = NULL;
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    // Best-effort: flash release failure is logged but does not fail close.
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
             mCameraId, rc);
    return rc;
}
887
888/*===========================================================================
889 * FUNCTION : initialize
890 *
891 * DESCRIPTION: Initialize frameworks callback functions
892 *
893 * PARAMETERS :
894 * @callback_ops : callback function to frameworks
895 *
896 * RETURN :
897 *
898 *==========================================================================*/
899int QCamera3HardwareInterface::initialize(
900 const struct camera3_callback_ops *callback_ops)
901{
902 ATRACE_CALL();
903 int rc;
904
905 LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
906 pthread_mutex_lock(&mMutex);
907
908 // Validate current state
909 switch (mState) {
910 case OPENED:
911 /* valid state */
912 break;
913 default:
914 LOGE("Invalid state %d", mState);
915 rc = -ENODEV;
916 goto err1;
917 }
918
919 rc = initParameters();
920 if (rc < 0) {
921 LOGE("initParamters failed %d", rc);
922 goto err1;
923 }
924 mCallbackOps = callback_ops;
925
926 mChannelHandle = mCameraHandle->ops->add_channel(
927 mCameraHandle->camera_handle, NULL, NULL, this);
928 if (mChannelHandle == 0) {
929 LOGE("add_channel failed");
930 rc = -ENOMEM;
931 pthread_mutex_unlock(&mMutex);
932 return rc;
933 }
934
935 pthread_mutex_unlock(&mMutex);
936 mCameraInitialized = true;
937 mState = INITIALIZED;
938 LOGI("X");
939 return 0;
940
941err1:
942 pthread_mutex_unlock(&mMutex);
943 return rc;
944}
945
946/*===========================================================================
947 * FUNCTION : validateStreamDimensions
948 *
949 * DESCRIPTION: Check if the configuration requested are those advertised
950 *
951 * PARAMETERS :
952 * @stream_list : streams to be configured
953 *
954 * RETURN :
955 *
956 *==========================================================================*/
957int QCamera3HardwareInterface::validateStreamDimensions(
958 camera3_stream_configuration_t *streamList)
959{
960 int rc = NO_ERROR;
961 size_t count = 0;
962
963 camera3_stream_t *inputStream = NULL;
964 /*
965 * Loop through all streams to find input stream if it exists*
966 */
967 for (size_t i = 0; i< streamList->num_streams; i++) {
968 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
969 if (inputStream != NULL) {
970 LOGE("Error, Multiple input streams requested");
971 return -EINVAL;
972 }
973 inputStream = streamList->streams[i];
974 }
975 }
976 /*
977 * Loop through all streams requested in configuration
978 * Check if unsupported sizes have been requested on any of them
979 */
980 for (size_t j = 0; j < streamList->num_streams; j++) {
981 bool sizeFound = false;
982 camera3_stream_t *newStream = streamList->streams[j];
983
984 uint32_t rotatedHeight = newStream->height;
985 uint32_t rotatedWidth = newStream->width;
986 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
987 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
988 rotatedHeight = newStream->width;
989 rotatedWidth = newStream->height;
990 }
991
992 /*
993 * Sizes are different for each type of stream format check against
994 * appropriate table.
995 */
996 switch (newStream->format) {
997 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
998 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
999 case HAL_PIXEL_FORMAT_RAW10:
1000 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1001 for (size_t i = 0; i < count; i++) {
1002 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1003 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1004 sizeFound = true;
1005 break;
1006 }
1007 }
1008 break;
1009 case HAL_PIXEL_FORMAT_BLOB:
1010 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1011 /* Verify set size against generated sizes table */
1012 for (size_t i = 0; i < count; i++) {
1013 if (((int32_t)rotatedWidth ==
1014 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1015 ((int32_t)rotatedHeight ==
1016 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1017 sizeFound = true;
1018 break;
1019 }
1020 }
1021 break;
1022 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1023 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1024 default:
1025 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1026 || newStream->stream_type == CAMERA3_STREAM_INPUT
1027 || IS_USAGE_ZSL(newStream->usage)) {
1028 if (((int32_t)rotatedWidth ==
1029 gCamCapability[mCameraId]->active_array_size.width) &&
1030 ((int32_t)rotatedHeight ==
1031 gCamCapability[mCameraId]->active_array_size.height)) {
1032 sizeFound = true;
1033 break;
1034 }
1035 /* We could potentially break here to enforce ZSL stream
1036 * set from frameworks always is full active array size
1037 * but it is not clear from the spc if framework will always
1038 * follow that, also we have logic to override to full array
1039 * size, so keeping the logic lenient at the moment
1040 */
1041 }
1042 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1043 MAX_SIZES_CNT);
1044 for (size_t i = 0; i < count; i++) {
1045 if (((int32_t)rotatedWidth ==
1046 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1047 ((int32_t)rotatedHeight ==
1048 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1049 sizeFound = true;
1050 break;
1051 }
1052 }
1053 break;
1054 } /* End of switch(newStream->format) */
1055
1056 /* We error out even if a single stream has unsupported size set */
1057 if (!sizeFound) {
1058 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1059 rotatedWidth, rotatedHeight, newStream->format,
1060 gCamCapability[mCameraId]->active_array_size.width,
1061 gCamCapability[mCameraId]->active_array_size.height);
1062 rc = -EINVAL;
1063 break;
1064 }
1065 } /* End of for each stream */
1066 return rc;
1067}
1068
1069/*==============================================================================
1070 * FUNCTION : isSupportChannelNeeded
1071 *
1072 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1073 *
1074 * PARAMETERS :
1075 * @stream_list : streams to be configured
1076 * @stream_config_info : the config info for streams to be configured
1077 *
 * RETURN : Boolean true/false decision
1079 *
1080 *==========================================================================*/
1081bool QCamera3HardwareInterface::isSupportChannelNeeded(
1082 camera3_stream_configuration_t *streamList,
1083 cam_stream_size_info_t stream_config_info)
1084{
1085 uint32_t i;
1086 bool pprocRequested = false;
1087 /* Check for conditions where PProc pipeline does not have any streams*/
1088 for (i = 0; i < stream_config_info.num_streams; i++) {
1089 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1090 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1091 pprocRequested = true;
1092 break;
1093 }
1094 }
1095
1096 if (pprocRequested == false )
1097 return true;
1098
1099 /* Dummy stream needed if only raw or jpeg streams present */
1100 for (i = 0; i < streamList->num_streams; i++) {
1101 switch(streamList->streams[i]->format) {
1102 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1103 case HAL_PIXEL_FORMAT_RAW10:
1104 case HAL_PIXEL_FORMAT_RAW16:
1105 case HAL_PIXEL_FORMAT_BLOB:
1106 break;
1107 default:
1108 return false;
1109 }
1110 }
1111 return true;
1112}
1113
1114/*==============================================================================
1115 * FUNCTION : getSensorOutputSize
1116 *
 * DESCRIPTION: Get sensor output size based on current stream configuration
1118 *
1119 * PARAMETERS :
1120 * @sensor_dim : sensor output dimension (output)
1121 *
1122 * RETURN : int32_t type of status
1123 * NO_ERROR -- success
 * non-zero failure code
1125 *
1126 *==========================================================================*/
1127int32_t QCamera3HardwareInterface::getSensorOutputSize(cam_dimension_t &sensor_dim)
1128{
1129 int32_t rc = NO_ERROR;
1130
1131 cam_dimension_t max_dim = {0, 0};
1132 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1133 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1134 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1135 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1136 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1137 }
1138
1139 clear_metadata_buffer(mParameters);
1140
1141 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1142 max_dim);
1143 if (rc != NO_ERROR) {
1144 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1145 return rc;
1146 }
1147
1148 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1149 if (rc != NO_ERROR) {
1150 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1151 return rc;
1152 }
1153
1154 clear_metadata_buffer(mParameters);
1155 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_RAW_DIMENSION);
1156
1157 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1158 mParameters);
1159 if (rc != NO_ERROR) {
1160 LOGE("Failed to get CAM_INTF_PARM_RAW_DIMENSION");
1161 return rc;
1162 }
1163
1164 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_RAW_DIMENSION, sensor_dim);
1165 LOGH("sensor output dimension = %d x %d", sensor_dim.width, sensor_dim.height);
1166
1167 return rc;
1168}
1169
1170/*==============================================================================
1171 * FUNCTION : enablePowerHint
1172 *
1173 * DESCRIPTION: enable single powerhint for preview and different video modes.
1174 *
1175 * PARAMETERS :
1176 *
1177 * RETURN : NULL
1178 *
1179 *==========================================================================*/
1180void QCamera3HardwareInterface::enablePowerHint()
1181{
1182 if (!mPowerHintEnabled) {
1183 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, true);
1184 mPowerHintEnabled = true;
1185 }
1186}
1187
1188/*==============================================================================
1189 * FUNCTION : disablePowerHint
1190 *
1191 * DESCRIPTION: disable current powerhint.
1192 *
1193 * PARAMETERS :
1194 *
1195 * RETURN : NULL
1196 *
1197 *==========================================================================*/
1198void QCamera3HardwareInterface::disablePowerHint()
1199{
1200 if (mPowerHintEnabled) {
1201 m_perfLock.powerHint(POWER_HINT_VIDEO_ENCODE, false);
1202 mPowerHintEnabled = false;
1203 }
1204}
1205
1206/*==============================================================================
1207 * FUNCTION : addToPPFeatureMask
1208 *
1209 * DESCRIPTION: add additional features to pp feature mask based on
1210 * stream type and usecase
1211 *
1212 * PARAMETERS :
1213 * @stream_format : stream type for feature mask
1214 * @stream_idx : stream idx within postprocess_mask list to change
1215 *
1216 * RETURN : NULL
1217 *
1218 *==========================================================================*/
1219void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1220 uint32_t stream_idx)
1221{
1222 char feature_mask_value[PROPERTY_VALUE_MAX];
1223 cam_feature_mask_t feature_mask;
1224 int args_converted;
1225 int property_len;
1226
1227 /* Get feature mask from property */
1228 property_len = property_get("persist.camera.hal3.feature",
1229 feature_mask_value, "0");
1230 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1231 (feature_mask_value[1] == 'x')) {
1232 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1233 } else {
1234 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1235 }
1236 if (1 != args_converted) {
1237 feature_mask = 0;
1238 LOGE("Wrong feature mask %s", feature_mask_value);
1239 return;
1240 }
1241
1242 switch (stream_format) {
1243 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1244 /* Add LLVD to pp feature mask only if video hint is enabled */
1245 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1246 mStreamConfigInfo.postprocess_mask[stream_idx]
1247 |= CAM_QTI_FEATURE_SW_TNR;
1248 LOGH("Added SW TNR to pp feature mask");
1249 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1250 mStreamConfigInfo.postprocess_mask[stream_idx]
1251 |= CAM_QCOM_FEATURE_LLVD;
1252 LOGH("Added LLVD SeeMore to pp feature mask");
1253 }
1254 break;
1255 }
1256 default:
1257 break;
1258 }
1259 LOGD("PP feature mask %llx",
1260 mStreamConfigInfo.postprocess_mask[stream_idx]);
1261}
1262
1263/*==============================================================================
1264 * FUNCTION : updateFpsInPreviewBuffer
1265 *
1266 * DESCRIPTION: update FPS information in preview buffer.
1267 *
1268 * PARAMETERS :
1269 * @metadata : pointer to metadata buffer
1270 * @frame_number: frame_number to look for in pending buffer list
1271 *
1272 * RETURN : None
1273 *
1274 *==========================================================================*/
void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
        uint32_t frame_number)
{
    // Mark all pending buffers for this particular request
    // with corresponding framerate information
    for (List<PendingBuffersInRequest>::iterator req =
            mPendingBuffersMap.mPendingBuffersInRequest.begin();
            req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
        for(List<PendingBufferInfo>::iterator j =
                req->mPendingBufferList.begin();
                j != req->mPendingBufferList.end(); j++) {
            QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
            // Only buffers that belong to the requested frame number AND
            // come from a preview-type stream are annotated.
            if ((req->frame_number == frame_number) &&
                (channel->getStreamTypeMask() &
                (1U << CAM_STREAM_TYPE_PREVIEW))) {
                // IF_META_AVAILABLE guards the body so it runs only when
                // CAM_INTF_PARM_FPS_RANGE is present in the metadata buffer.
                IF_META_AVAILABLE(cam_fps_range_t, float_range,
                    CAM_INTF_PARM_FPS_RANGE, metadata) {
                    typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
                    struct private_handle_t *priv_handle =
                        (struct private_handle_t *)(*(j->buffer));
                    // Writes the max fps into the gralloc private handle via
                    // UPDATE_REFRESH_RATE — presumably consumed by the display
                    // pipeline to track camera fps; confirm against setMetaData.
                    setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
                }
            }
        }
    }
}
1301
1302/*===========================================================================
1303 * FUNCTION : configureStreams
1304 *
1305 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1306 * and output streams.
1307 *
1308 * PARAMETERS :
1309 * @stream_list : streams to be configured
1310 *
1311 * RETURN :
1312 *
1313 *==========================================================================*/
1314int QCamera3HardwareInterface::configureStreams(
1315 camera3_stream_configuration_t *streamList)
1316{
1317 ATRACE_CALL();
1318 int rc = 0;
1319
1320 // Acquire perfLock before configure streams
1321 m_perfLock.lock_acq();
1322 rc = configureStreamsPerfLocked(streamList);
1323 m_perfLock.lock_rel();
1324
1325 return rc;
1326}
1327
1328/*===========================================================================
1329 * FUNCTION : configureStreamsPerfLocked
1330 *
1331 * DESCRIPTION: configureStreams while perfLock is held.
1332 *
1333 * PARAMETERS :
1334 * @stream_list : streams to be configured
1335 *
1336 * RETURN : int32_t type of status
1337 * NO_ERROR -- success
 * non-zero failure code
1339 *==========================================================================*/
1340int QCamera3HardwareInterface::configureStreamsPerfLocked(
1341 camera3_stream_configuration_t *streamList)
1342{
1343 ATRACE_CALL();
1344 int rc = 0;
1345
1346 // Sanity check stream_list
1347 if (streamList == NULL) {
1348 LOGE("NULL stream configuration");
1349 return BAD_VALUE;
1350 }
1351 if (streamList->streams == NULL) {
1352 LOGE("NULL stream list");
1353 return BAD_VALUE;
1354 }
1355
1356 if (streamList->num_streams < 1) {
1357 LOGE("Bad number of streams requested: %d",
1358 streamList->num_streams);
1359 return BAD_VALUE;
1360 }
1361
1362 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1363 LOGE("Maximum number of streams %d exceeded: %d",
1364 MAX_NUM_STREAMS, streamList->num_streams);
1365 return BAD_VALUE;
1366 }
1367
1368 mOpMode = streamList->operation_mode;
1369 LOGD("mOpMode: %d", mOpMode);
1370
1371 /* first invalidate all the steams in the mStreamList
1372 * if they appear again, they will be validated */
1373 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1374 it != mStreamInfo.end(); it++) {
1375 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1376 if (channel) {
1377 channel->stop();
1378 }
1379 (*it)->status = INVALID;
1380 }
1381
1382 if (mRawDumpChannel) {
1383 mRawDumpChannel->stop();
1384 delete mRawDumpChannel;
1385 mRawDumpChannel = NULL;
1386 }
1387
1388 if (mSupportChannel)
1389 mSupportChannel->stop();
1390
1391 if (mAnalysisChannel) {
1392 mAnalysisChannel->stop();
1393 }
1394 if (mMetadataChannel) {
1395 /* If content of mStreamInfo is not 0, there is metadata stream */
1396 mMetadataChannel->stop();
1397 }
1398 if (mChannelHandle) {
1399 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
1400 mChannelHandle);
1401 LOGD("stopping channel %d", mChannelHandle);
1402 }
1403
1404 pthread_mutex_lock(&mMutex);
1405
1406 // Check state
1407 switch (mState) {
1408 case INITIALIZED:
1409 case CONFIGURED:
1410 case STARTED:
1411 /* valid state */
1412 break;
1413 default:
1414 LOGE("Invalid state %d", mState);
1415 pthread_mutex_unlock(&mMutex);
1416 return -ENODEV;
1417 }
1418
1419 /* Check whether we have video stream */
1420 m_bIs4KVideo = false;
1421 m_bIsVideo = false;
1422 m_bEisSupportedSize = false;
1423 m_bTnrEnabled = false;
1424 bool isZsl = false;
1425 uint32_t videoWidth = 0U;
1426 uint32_t videoHeight = 0U;
1427 size_t rawStreamCnt = 0;
1428 size_t stallStreamCnt = 0;
1429 size_t processedStreamCnt = 0;
1430 // Number of streams on ISP encoder path
1431 size_t numStreamsOnEncoder = 0;
1432 size_t numYuv888OnEncoder = 0;
1433 bool bYuv888OverrideJpeg = false;
1434 cam_dimension_t largeYuv888Size = {0, 0};
1435 cam_dimension_t maxViewfinderSize = {0, 0};
1436 bool bJpegExceeds4K = false;
1437 bool bJpegOnEncoder = false;
1438 bool bUseCommonFeatureMask = false;
1439 cam_feature_mask_t commonFeatureMask = 0;
1440 bool bSmallJpegSize = false;
1441 uint32_t width_ratio;
1442 uint32_t height_ratio;
1443 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1444 camera3_stream_t *inputStream = NULL;
1445 bool isJpeg = false;
1446 cam_dimension_t jpegSize = {0, 0};
1447
1448 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1449
1450 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001451 bool oisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001452 uint8_t eis_prop_set;
1453 uint32_t maxEisWidth = 0;
1454 uint32_t maxEisHeight = 0;
1455
1456 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1457
1458 size_t count = IS_TYPE_MAX;
1459 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1460 for (size_t i = 0; i < count; i++) {
1461 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001462 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1463 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001464 break;
1465 }
1466 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001467 count = CAM_OPT_STAB_MAX;
1468 count = MIN(gCamCapability[mCameraId]->optical_stab_modes_count, count);
1469 for (size_t i = 0; i < count; i++) {
1470 if (gCamCapability[mCameraId]->optical_stab_modes[i] == CAM_OPT_STAB_ON) {
1471 oisSupported = true;
1472 break;
1473 }
1474 }
1475
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001476 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001477 maxEisWidth = MAX_EIS_WIDTH;
1478 maxEisHeight = MAX_EIS_HEIGHT;
1479 }
1480
1481 /* EIS setprop control */
1482 char eis_prop[PROPERTY_VALUE_MAX];
1483 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001484 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001485 eis_prop_set = (uint8_t)atoi(eis_prop);
1486
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001487 m_bEisEnable = eis_prop_set && (!oisSupported && m_bEisSupported) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001488 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1489
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001490 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d, oisSupported:%d ",
1491 m_bEisEnable, eis_prop_set, m_bEisSupported, oisSupported);
1492
Thierry Strudel3d639192016-09-09 11:52:26 -07001493 /* stream configurations */
1494 for (size_t i = 0; i < streamList->num_streams; i++) {
1495 camera3_stream_t *newStream = streamList->streams[i];
1496 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1497 "height = %d, rotation = %d, usage = 0x%x",
1498 i, newStream->stream_type, newStream->format,
1499 newStream->width, newStream->height, newStream->rotation,
1500 newStream->usage);
1501 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1502 newStream->stream_type == CAMERA3_STREAM_INPUT){
1503 isZsl = true;
1504 }
1505 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1506 inputStream = newStream;
1507 }
1508
1509 if (newStream->format == HAL_PIXEL_FORMAT_BLOB) {
1510 isJpeg = true;
1511 jpegSize.width = newStream->width;
1512 jpegSize.height = newStream->height;
1513 if (newStream->width > VIDEO_4K_WIDTH ||
1514 newStream->height > VIDEO_4K_HEIGHT)
1515 bJpegExceeds4K = true;
1516 }
1517
1518 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1519 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1520 m_bIsVideo = true;
1521 videoWidth = newStream->width;
1522 videoHeight = newStream->height;
1523 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1524 (VIDEO_4K_HEIGHT <= newStream->height)) {
1525 m_bIs4KVideo = true;
1526 }
1527 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1528 (newStream->height <= maxEisHeight);
1529 }
1530 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1531 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1532 switch (newStream->format) {
1533 case HAL_PIXEL_FORMAT_BLOB:
1534 stallStreamCnt++;
1535 if (isOnEncoder(maxViewfinderSize, newStream->width,
1536 newStream->height)) {
1537 numStreamsOnEncoder++;
1538 bJpegOnEncoder = true;
1539 }
1540 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
1541 newStream->width);
1542 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
1543 newStream->height);;
1544 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
1545 "FATAL: max_downscale_factor cannot be zero and so assert");
1546 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
1547 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
1548 LOGH("Setting small jpeg size flag to true");
1549 bSmallJpegSize = true;
1550 }
1551 break;
1552 case HAL_PIXEL_FORMAT_RAW10:
1553 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1554 case HAL_PIXEL_FORMAT_RAW16:
1555 rawStreamCnt++;
1556 break;
1557 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1558 processedStreamCnt++;
1559 if (isOnEncoder(maxViewfinderSize, newStream->width,
1560 newStream->height)) {
1561 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
1562 !IS_USAGE_ZSL(newStream->usage)) {
1563 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1564 }
1565 numStreamsOnEncoder++;
1566 }
1567 break;
1568 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1569 processedStreamCnt++;
1570 if (isOnEncoder(maxViewfinderSize, newStream->width,
1571 newStream->height)) {
1572 // If Yuv888 size is not greater than 4K, set feature mask
1573 // to SUPERSET so that it support concurrent request on
1574 // YUV and JPEG.
1575 if (newStream->width <= VIDEO_4K_WIDTH &&
1576 newStream->height <= VIDEO_4K_HEIGHT) {
1577 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1578 }
1579 numStreamsOnEncoder++;
1580 numYuv888OnEncoder++;
1581 largeYuv888Size.width = newStream->width;
1582 largeYuv888Size.height = newStream->height;
1583 }
1584 break;
1585 default:
1586 processedStreamCnt++;
1587 if (isOnEncoder(maxViewfinderSize, newStream->width,
1588 newStream->height)) {
1589 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1590 numStreamsOnEncoder++;
1591 }
1592 break;
1593 }
1594
1595 }
1596 }
1597
1598 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1599 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
1600 !m_bIsVideo) {
1601 m_bEisEnable = false;
1602 }
1603
1604 /* Logic to enable/disable TNR based on specific config size/etc.*/
1605 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
1606 ((videoWidth == 1920 && videoHeight == 1080) ||
1607 (videoWidth == 1280 && videoHeight == 720)) &&
1608 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
1609 m_bTnrEnabled = true;
1610
1611 /* Check if num_streams is sane */
1612 if (stallStreamCnt > MAX_STALLING_STREAMS ||
1613 rawStreamCnt > MAX_RAW_STREAMS ||
1614 processedStreamCnt > MAX_PROCESSED_STREAMS) {
1615 LOGE("Invalid stream configu: stall: %d, raw: %d, processed %d",
1616 stallStreamCnt, rawStreamCnt, processedStreamCnt);
1617 pthread_mutex_unlock(&mMutex);
1618 return -EINVAL;
1619 }
1620 /* Check whether we have zsl stream or 4k video case */
1621 if (isZsl && m_bIsVideo) {
1622 LOGE("Currently invalid configuration ZSL&Video!");
1623 pthread_mutex_unlock(&mMutex);
1624 return -EINVAL;
1625 }
1626 /* Check if stream sizes are sane */
1627 if (numStreamsOnEncoder > 2) {
1628 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
1629 pthread_mutex_unlock(&mMutex);
1630 return -EINVAL;
1631 } else if (1 < numStreamsOnEncoder){
1632 bUseCommonFeatureMask = true;
1633 LOGH("Multiple streams above max viewfinder size, common mask needed");
1634 }
1635
1636 /* Check if BLOB size is greater than 4k in 4k recording case */
1637 if (m_bIs4KVideo && bJpegExceeds4K) {
1638 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
1639 pthread_mutex_unlock(&mMutex);
1640 return -EINVAL;
1641 }
1642
1643 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
1644 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
1645 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
1646 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
1647 // configurations:
1648 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
1649 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
1650 // (These two configurations will not have CAC2 enabled even in HQ modes.)
1651 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
1652 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
1653 __func__);
1654 pthread_mutex_unlock(&mMutex);
1655 return -EINVAL;
1656 }
1657
1658 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
1659 // the YUV stream's size is greater or equal to the JPEG size, set common
1660 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
1661 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
1662 jpegSize.width, jpegSize.height) &&
1663 largeYuv888Size.width > jpegSize.width &&
1664 largeYuv888Size.height > jpegSize.height) {
1665 bYuv888OverrideJpeg = true;
1666 } else if (!isJpeg && numStreamsOnEncoder > 1) {
1667 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1668 }
1669
1670 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
1671 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
1672 commonFeatureMask);
1673 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
1674 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
1675
1676 rc = validateStreamDimensions(streamList);
1677 if (rc == NO_ERROR) {
1678 rc = validateStreamRotations(streamList);
1679 }
1680 if (rc != NO_ERROR) {
1681 LOGE("Invalid stream configuration requested!");
1682 pthread_mutex_unlock(&mMutex);
1683 return rc;
1684 }
1685
1686 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
1687 for (size_t i = 0; i < streamList->num_streams; i++) {
1688 camera3_stream_t *newStream = streamList->streams[i];
1689 LOGH("newStream type = %d, stream format = %d "
1690 "stream size : %d x %d, stream rotation = %d",
1691 newStream->stream_type, newStream->format,
1692 newStream->width, newStream->height, newStream->rotation);
1693 //if the stream is in the mStreamList validate it
1694 bool stream_exists = false;
1695 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
1696 it != mStreamInfo.end(); it++) {
1697 if ((*it)->stream == newStream) {
1698 QCamera3ProcessingChannel *channel =
1699 (QCamera3ProcessingChannel*)(*it)->stream->priv;
1700 stream_exists = true;
1701 if (channel)
1702 delete channel;
1703 (*it)->status = VALID;
1704 (*it)->stream->priv = NULL;
1705 (*it)->channel = NULL;
1706 }
1707 }
1708 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
1709 //new stream
1710 stream_info_t* stream_info;
1711 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
1712 if (!stream_info) {
1713 LOGE("Could not allocate stream info");
1714 rc = -ENOMEM;
1715 pthread_mutex_unlock(&mMutex);
1716 return rc;
1717 }
1718 stream_info->stream = newStream;
1719 stream_info->status = VALID;
1720 stream_info->channel = NULL;
1721 mStreamInfo.push_back(stream_info);
1722 }
1723 /* Covers Opaque ZSL and API1 F/W ZSL */
1724 if (IS_USAGE_ZSL(newStream->usage)
1725 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
1726 if (zslStream != NULL) {
1727 LOGE("Multiple input/reprocess streams requested!");
1728 pthread_mutex_unlock(&mMutex);
1729 return BAD_VALUE;
1730 }
1731 zslStream = newStream;
1732 }
1733 /* Covers YUV reprocess */
1734 if (inputStream != NULL) {
1735 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
1736 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1737 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
1738 && inputStream->width == newStream->width
1739 && inputStream->height == newStream->height) {
1740 if (zslStream != NULL) {
1741 /* This scenario indicates multiple YUV streams with same size
1742 * as input stream have been requested, since zsl stream handle
1743 * is solely use for the purpose of overriding the size of streams
1744 * which share h/w streams we will just make a guess here as to
1745 * which of the stream is a ZSL stream, this will be refactored
1746 * once we make generic logic for streams sharing encoder output
1747 */
1748 LOGH("Warning, Multiple ip/reprocess streams requested!");
1749 }
1750 zslStream = newStream;
1751 }
1752 }
1753 }
1754
1755 /* If a zsl stream is set, we know that we have configured at least one input or
1756 bidirectional stream */
1757 if (NULL != zslStream) {
1758 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
1759 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
1760 mInputStreamInfo.format = zslStream->format;
1761 mInputStreamInfo.usage = zslStream->usage;
1762 LOGD("Input stream configured! %d x %d, format %d, usage %d",
1763 mInputStreamInfo.dim.width,
1764 mInputStreamInfo.dim.height,
1765 mInputStreamInfo.format, mInputStreamInfo.usage);
1766 }
1767
1768 cleanAndSortStreamInfo();
1769 if (mMetadataChannel) {
1770 delete mMetadataChannel;
1771 mMetadataChannel = NULL;
1772 }
1773 if (mSupportChannel) {
1774 delete mSupportChannel;
1775 mSupportChannel = NULL;
1776 }
1777
1778 if (mAnalysisChannel) {
1779 delete mAnalysisChannel;
1780 mAnalysisChannel = NULL;
1781 }
1782
1783 if (mDummyBatchChannel) {
1784 delete mDummyBatchChannel;
1785 mDummyBatchChannel = NULL;
1786 }
1787
1788 //Create metadata channel and initialize it
1789 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
1790 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
1791 gCamCapability[mCameraId]->color_arrangement);
1792 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
1793 mChannelHandle, mCameraHandle->ops, captureResultCb,
1794 &padding_info, metadataFeatureMask, this);
1795 if (mMetadataChannel == NULL) {
1796 LOGE("failed to allocate metadata channel");
1797 rc = -ENOMEM;
1798 pthread_mutex_unlock(&mMutex);
1799 return rc;
1800 }
1801 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
1802 if (rc < 0) {
1803 LOGE("metadata channel initialization failed");
1804 delete mMetadataChannel;
1805 mMetadataChannel = NULL;
1806 pthread_mutex_unlock(&mMutex);
1807 return rc;
1808 }
1809
1810 // Create analysis stream all the time, even when h/w support is not available
1811 {
1812 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1813 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
1814 gCamCapability[mCameraId]->color_arrangement);
1815 cam_analysis_info_t analysisInfo;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001816 int32_t ret = NO_ERROR;
1817 ret = mCommon.getAnalysisInfo(
Thierry Strudel3d639192016-09-09 11:52:26 -07001818 FALSE,
1819 TRUE,
1820 analysisFeatureMask,
1821 &analysisInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001822 if (ret == NO_ERROR) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001823 mAnalysisChannel = new QCamera3SupportChannel(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001824 mCameraHandle->camera_handle,
1825 mChannelHandle,
1826 mCameraHandle->ops,
1827 &analysisInfo.analysis_padding_info,
1828 analysisFeatureMask,
1829 CAM_STREAM_TYPE_ANALYSIS,
1830 &analysisInfo.analysis_max_res,
1831 (analysisInfo.analysis_format
1832 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
1833 : CAM_FORMAT_YUV_420_NV21),
1834 analysisInfo.hw_analysis_supported,
1835 gCamCapability[mCameraId]->color_arrangement,
1836 this,
1837 0); // force buffer count to 0
1838 } else {
1839 LOGW("getAnalysisInfo failed, ret = %d", ret);
1840 }
1841 if (!mAnalysisChannel) {
1842 LOGW("Analysis channel cannot be created");
Thierry Strudel3d639192016-09-09 11:52:26 -07001843 }
1844 }
1845
1846 bool isRawStreamRequested = false;
1847 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
1848 /* Allocate channel objects for the requested streams */
1849 for (size_t i = 0; i < streamList->num_streams; i++) {
1850 camera3_stream_t *newStream = streamList->streams[i];
1851 uint32_t stream_usage = newStream->usage;
1852 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
1853 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
1854 struct camera_info *p_info = NULL;
1855 pthread_mutex_lock(&gCamLock);
1856 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
1857 pthread_mutex_unlock(&gCamLock);
1858 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1859 || IS_USAGE_ZSL(newStream->usage)) &&
1860 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
1861 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1862 if (bUseCommonFeatureMask) {
1863 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1864 commonFeatureMask;
1865 } else {
1866 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1867 CAM_QCOM_FEATURE_NONE;
1868 }
1869
1870 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
1871 LOGH("Input stream configured, reprocess config");
1872 } else {
1873 //for non zsl streams find out the format
1874 switch (newStream->format) {
1875 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
1876 {
1877 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1878 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1879 /* add additional features to pp feature mask */
1880 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
1881 mStreamConfigInfo.num_streams);
1882
1883 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
1884 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1885 CAM_STREAM_TYPE_VIDEO;
1886 if (m_bTnrEnabled && m_bTnrVideo) {
1887 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1888 CAM_QCOM_FEATURE_CPP_TNR;
1889 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1890 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1891 ~CAM_QCOM_FEATURE_CDS;
1892 }
1893 } else {
1894 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
1895 CAM_STREAM_TYPE_PREVIEW;
1896 if (m_bTnrEnabled && m_bTnrPreview) {
1897 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
1898 CAM_QCOM_FEATURE_CPP_TNR;
1899 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
1900 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
1901 ~CAM_QCOM_FEATURE_CDS;
1902 }
1903 padding_info.width_padding = mSurfaceStridePadding;
1904 padding_info.height_padding = CAM_PAD_TO_2;
1905 }
1906 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1907 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1908 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1909 newStream->height;
1910 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1911 newStream->width;
1912 }
1913 }
1914 break;
1915 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1916 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
1917 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
1918 if (bUseCommonFeatureMask)
1919 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1920 commonFeatureMask;
1921 else
1922 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1923 CAM_QCOM_FEATURE_NONE;
1924 } else {
1925 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1926 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1927 }
1928 break;
1929 case HAL_PIXEL_FORMAT_BLOB:
1930 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
1931 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
1932 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
1933 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
1934 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
1935 } else {
1936 if (bUseCommonFeatureMask &&
1937 isOnEncoder(maxViewfinderSize, newStream->width,
1938 newStream->height)) {
1939 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
1940 } else {
1941 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1942 }
1943 }
1944 if (isZsl) {
1945 if (zslStream) {
1946 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1947 (int32_t)zslStream->width;
1948 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1949 (int32_t)zslStream->height;
1950 } else {
1951 LOGE("Error, No ZSL stream identified");
1952 pthread_mutex_unlock(&mMutex);
1953 return -EINVAL;
1954 }
1955 } else if (m_bIs4KVideo) {
1956 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
1957 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
1958 } else if (bYuv888OverrideJpeg) {
1959 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
1960 (int32_t)largeYuv888Size.width;
1961 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
1962 (int32_t)largeYuv888Size.height;
1963 }
1964 break;
1965 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1966 case HAL_PIXEL_FORMAT_RAW16:
1967 case HAL_PIXEL_FORMAT_RAW10:
1968 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
1969 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1970 isRawStreamRequested = true;
1971 break;
1972 default:
1973 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
1974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
1975 break;
1976 }
1977 }
1978
1979 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
1980 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
1981 gCamCapability[mCameraId]->color_arrangement);
1982
1983 if (newStream->priv == NULL) {
1984 //New stream, construct channel
1985 switch (newStream->stream_type) {
1986 case CAMERA3_STREAM_INPUT:
1987 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
1988 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
1989 break;
1990 case CAMERA3_STREAM_BIDIRECTIONAL:
1991 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
1992 GRALLOC_USAGE_HW_CAMERA_WRITE;
1993 break;
1994 case CAMERA3_STREAM_OUTPUT:
1995 /* For video encoding stream, set read/write rarely
1996 * flag so that they may be set to un-cached */
1997 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
1998 newStream->usage |=
1999 (GRALLOC_USAGE_SW_READ_RARELY |
2000 GRALLOC_USAGE_SW_WRITE_RARELY |
2001 GRALLOC_USAGE_HW_CAMERA_WRITE);
2002 else if (IS_USAGE_ZSL(newStream->usage))
2003 {
2004 LOGD("ZSL usage flag skipping");
2005 }
2006 else if (newStream == zslStream
2007 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2008 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2009 } else
2010 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2011 break;
2012 default:
2013 LOGE("Invalid stream_type %d", newStream->stream_type);
2014 break;
2015 }
2016
2017 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2018 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2019 QCamera3ProcessingChannel *channel = NULL;
2020 switch (newStream->format) {
2021 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2022 if ((newStream->usage &
2023 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2024 (streamList->operation_mode ==
2025 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2026 ) {
2027 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2028 mChannelHandle, mCameraHandle->ops, captureResultCb,
2029 &gCamCapability[mCameraId]->padding_info,
2030 this,
2031 newStream,
2032 (cam_stream_type_t)
2033 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2034 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2035 mMetadataChannel,
2036 0); //heap buffers are not required for HFR video channel
2037 if (channel == NULL) {
2038 LOGE("allocation of channel failed");
2039 pthread_mutex_unlock(&mMutex);
2040 return -ENOMEM;
2041 }
2042 //channel->getNumBuffers() will return 0 here so use
2043 //MAX_INFLIGH_HFR_REQUESTS
2044 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2045 newStream->priv = channel;
2046 LOGI("num video buffers in HFR mode: %d",
2047 MAX_INFLIGHT_HFR_REQUESTS);
2048 } else {
2049 /* Copy stream contents in HFR preview only case to create
2050 * dummy batch channel so that sensor streaming is in
2051 * HFR mode */
2052 if (!m_bIsVideo && (streamList->operation_mode ==
2053 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2054 mDummyBatchStream = *newStream;
2055 }
2056 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2057 mChannelHandle, mCameraHandle->ops, captureResultCb,
2058 &gCamCapability[mCameraId]->padding_info,
2059 this,
2060 newStream,
2061 (cam_stream_type_t)
2062 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2063 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2064 mMetadataChannel,
2065 MAX_INFLIGHT_REQUESTS);
2066 if (channel == NULL) {
2067 LOGE("allocation of channel failed");
2068 pthread_mutex_unlock(&mMutex);
2069 return -ENOMEM;
2070 }
2071 newStream->max_buffers = channel->getNumBuffers();
2072 newStream->priv = channel;
2073 }
2074 break;
2075 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2076 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2077 mChannelHandle,
2078 mCameraHandle->ops, captureResultCb,
2079 &padding_info,
2080 this,
2081 newStream,
2082 (cam_stream_type_t)
2083 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2084 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2085 mMetadataChannel);
2086 if (channel == NULL) {
2087 LOGE("allocation of YUV channel failed");
2088 pthread_mutex_unlock(&mMutex);
2089 return -ENOMEM;
2090 }
2091 newStream->max_buffers = channel->getNumBuffers();
2092 newStream->priv = channel;
2093 break;
2094 }
2095 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2096 case HAL_PIXEL_FORMAT_RAW16:
2097 case HAL_PIXEL_FORMAT_RAW10:
2098 mRawChannel = new QCamera3RawChannel(
2099 mCameraHandle->camera_handle, mChannelHandle,
2100 mCameraHandle->ops, captureResultCb,
2101 &padding_info,
2102 this, newStream,
2103 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2104 mMetadataChannel,
2105 (newStream->format == HAL_PIXEL_FORMAT_RAW16));
2106 if (mRawChannel == NULL) {
2107 LOGE("allocation of raw channel failed");
2108 pthread_mutex_unlock(&mMutex);
2109 return -ENOMEM;
2110 }
2111 newStream->max_buffers = mRawChannel->getNumBuffers();
2112 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2113 break;
2114 case HAL_PIXEL_FORMAT_BLOB:
2115 // Max live snapshot inflight buffer is 1. This is to mitigate
2116 // frame drop issues for video snapshot. The more buffers being
2117 // allocated, the more frame drops there are.
2118 mPictureChannel = new QCamera3PicChannel(
2119 mCameraHandle->camera_handle, mChannelHandle,
2120 mCameraHandle->ops, captureResultCb,
2121 &padding_info, this, newStream,
2122 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2123 m_bIs4KVideo, isZsl, mMetadataChannel,
2124 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2125 if (mPictureChannel == NULL) {
2126 LOGE("allocation of channel failed");
2127 pthread_mutex_unlock(&mMutex);
2128 return -ENOMEM;
2129 }
2130 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2131 newStream->max_buffers = mPictureChannel->getNumBuffers();
2132 mPictureChannel->overrideYuvSize(
2133 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2134 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
2135 break;
2136
2137 default:
2138 LOGE("not a supported format 0x%x", newStream->format);
2139 break;
2140 }
2141 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2142 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2143 } else {
2144 LOGE("Error, Unknown stream type");
2145 pthread_mutex_unlock(&mMutex);
2146 return -EINVAL;
2147 }
2148
2149 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
2150 if (channel != NULL && channel->isUBWCEnabled()) {
2151 cam_format_t fmt = channel->getStreamDefaultFormat(
2152 mStreamConfigInfo.type[mStreamConfigInfo.num_streams]);
2153 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2154 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2155 }
2156 }
2157
2158 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2159 it != mStreamInfo.end(); it++) {
2160 if ((*it)->stream == newStream) {
2161 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2162 break;
2163 }
2164 }
2165 } else {
2166 // Channel already exists for this stream
2167 // Do nothing for now
2168 }
2169 padding_info = gCamCapability[mCameraId]->padding_info;
2170
2171 /* Do not add entries for input stream in metastream info
2172 * since there is no real stream associated with it
2173 */
2174 if (newStream->stream_type != CAMERA3_STREAM_INPUT)
2175 mStreamConfigInfo.num_streams++;
2176 }
2177
2178 //RAW DUMP channel
2179 if (mEnableRawDump && isRawStreamRequested == false){
2180 cam_dimension_t rawDumpSize;
2181 rawDumpSize = getMaxRawSize(mCameraId);
2182 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2183 setPAAFSupport(rawDumpFeatureMask,
2184 CAM_STREAM_TYPE_RAW,
2185 gCamCapability[mCameraId]->color_arrangement);
2186 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2187 mChannelHandle,
2188 mCameraHandle->ops,
2189 rawDumpSize,
2190 &padding_info,
2191 this, rawDumpFeatureMask);
2192 if (!mRawDumpChannel) {
2193 LOGE("Raw Dump channel cannot be created");
2194 pthread_mutex_unlock(&mMutex);
2195 return -ENOMEM;
2196 }
2197 }
2198
2199
2200 if (mAnalysisChannel) {
2201 cam_analysis_info_t analysisInfo;
2202 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2203 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2204 CAM_STREAM_TYPE_ANALYSIS;
2205 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2206 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2207 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2208 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2209 gCamCapability[mCameraId]->color_arrangement);
2210 rc = mCommon.getAnalysisInfo(FALSE, TRUE,
2211 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2212 &analysisInfo);
2213 if (rc != NO_ERROR) {
2214 LOGE("getAnalysisInfo failed, ret = %d", rc);
2215 pthread_mutex_unlock(&mMutex);
2216 return rc;
2217 }
2218 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2219 analysisInfo.analysis_max_res;
2220 mStreamConfigInfo.num_streams++;
2221 }
2222
2223 if (isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
2224 cam_analysis_info_t supportInfo;
2225 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2226 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2227 setPAAFSupport(callbackFeatureMask,
2228 CAM_STREAM_TYPE_CALLBACK,
2229 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002230 int32_t ret = NO_ERROR;
2231 ret = mCommon.getAnalysisInfo(FALSE, TRUE, callbackFeatureMask, &supportInfo);
2232 if (ret != NO_ERROR) {
2233 /* Ignore the error for Mono camera
2234 * because the PAAF bit mask is only set
2235 * for CAM_STREAM_TYPE_ANALYSIS stream type
2236 */
2237 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2238 LOGW("getAnalysisInfo failed, ret = %d", ret);
2239 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002240 }
2241 mSupportChannel = new QCamera3SupportChannel(
2242 mCameraHandle->camera_handle,
2243 mChannelHandle,
2244 mCameraHandle->ops,
2245 &gCamCapability[mCameraId]->padding_info,
2246 callbackFeatureMask,
2247 CAM_STREAM_TYPE_CALLBACK,
2248 &QCamera3SupportChannel::kDim,
2249 CAM_FORMAT_YUV_420_NV21,
2250 supportInfo.hw_analysis_supported,
2251 gCamCapability[mCameraId]->color_arrangement,
2252 this);
2253 if (!mSupportChannel) {
2254 LOGE("dummy channel cannot be created");
2255 pthread_mutex_unlock(&mMutex);
2256 return -ENOMEM;
2257 }
2258 }
2259
2260 if (mSupportChannel) {
2261 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2262 QCamera3SupportChannel::kDim;
2263 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2264 CAM_STREAM_TYPE_CALLBACK;
2265 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2266 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2267 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2268 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2269 gCamCapability[mCameraId]->color_arrangement);
2270 mStreamConfigInfo.num_streams++;
2271 }
2272
2273 if (mRawDumpChannel) {
2274 cam_dimension_t rawSize;
2275 rawSize = getMaxRawSize(mCameraId);
2276 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2277 rawSize;
2278 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2279 CAM_STREAM_TYPE_RAW;
2280 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2281 CAM_QCOM_FEATURE_NONE;
2282 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2283 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2284 gCamCapability[mCameraId]->color_arrangement);
2285 mStreamConfigInfo.num_streams++;
2286 }
2287 /* In HFR mode, if video stream is not added, create a dummy channel so that
2288 * ISP can create a batch mode even for preview only case. This channel is
2289 * never 'start'ed (no stream-on), it is only 'initialized' */
2290 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2291 !m_bIsVideo) {
2292 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2293 setPAAFSupport(dummyFeatureMask,
2294 CAM_STREAM_TYPE_VIDEO,
2295 gCamCapability[mCameraId]->color_arrangement);
2296 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2297 mChannelHandle,
2298 mCameraHandle->ops, captureResultCb,
2299 &gCamCapability[mCameraId]->padding_info,
2300 this,
2301 &mDummyBatchStream,
2302 CAM_STREAM_TYPE_VIDEO,
2303 dummyFeatureMask,
2304 mMetadataChannel);
2305 if (NULL == mDummyBatchChannel) {
2306 LOGE("creation of mDummyBatchChannel failed."
2307 "Preview will use non-hfr sensor mode ");
2308 }
2309 }
2310 if (mDummyBatchChannel) {
2311 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2312 mDummyBatchStream.width;
2313 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2314 mDummyBatchStream.height;
2315 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2316 CAM_STREAM_TYPE_VIDEO;
2317 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2318 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2319 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2320 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2321 gCamCapability[mCameraId]->color_arrangement);
2322 mStreamConfigInfo.num_streams++;
2323 }
2324
2325 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
2326 mStreamConfigInfo.buffer_info.max_buffers =
2327 m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
2328
2329 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
2330 for (pendingRequestIterator i = mPendingRequestsList.begin();
2331 i != mPendingRequestsList.end();) {
2332 i = erasePendingRequest(i);
2333 }
2334 mPendingFrameDropList.clear();
2335 // Initialize/Reset the pending buffers list
2336 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
2337 req.mPendingBufferList.clear();
2338 }
2339 mPendingBuffersMap.mPendingBuffersInRequest.clear();
2340
2341 mPendingReprocessResultList.clear();
2342
2343 mCurJpegMeta.clear();
2344 //Get min frame duration for this streams configuration
2345 deriveMinFrameDuration();
2346
2347 // Update state
2348 mState = CONFIGURED;
2349
2350 pthread_mutex_unlock(&mMutex);
2351
2352 return rc;
2353}
2354
2355/*===========================================================================
2356 * FUNCTION : validateCaptureRequest
2357 *
2358 * DESCRIPTION: validate a capture request from camera service
2359 *
2360 * PARAMETERS :
2361 * @request : request from framework to process
2362 *
2363 * RETURN :
2364 *
2365 *==========================================================================*/
2366int QCamera3HardwareInterface::validateCaptureRequest(
2367 camera3_capture_request_t *request)
2368{
2369 ssize_t idx = 0;
2370 const camera3_stream_buffer_t *b;
2371 CameraMetadata meta;
2372
2373 /* Sanity check the request */
2374 if (request == NULL) {
2375 LOGE("NULL capture request");
2376 return BAD_VALUE;
2377 }
2378
2379 if ((request->settings == NULL) && (mState == CONFIGURED)) {
2380 /*settings cannot be null for the first request*/
2381 return BAD_VALUE;
2382 }
2383
2384 uint32_t frameNumber = request->frame_number;
2385 if (request->num_output_buffers < 1 || request->output_buffers == NULL) {
2386 LOGE("Request %d: No output buffers provided!",
2387 __FUNCTION__, frameNumber);
2388 return BAD_VALUE;
2389 }
2390 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
2391 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
2392 request->num_output_buffers, MAX_NUM_STREAMS);
2393 return BAD_VALUE;
2394 }
2395 if (request->input_buffer != NULL) {
2396 b = request->input_buffer;
2397 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2398 LOGE("Request %d: Buffer %ld: Status not OK!",
2399 frameNumber, (long)idx);
2400 return BAD_VALUE;
2401 }
2402 if (b->release_fence != -1) {
2403 LOGE("Request %d: Buffer %ld: Has a release fence!",
2404 frameNumber, (long)idx);
2405 return BAD_VALUE;
2406 }
2407 if (b->buffer == NULL) {
2408 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2409 frameNumber, (long)idx);
2410 return BAD_VALUE;
2411 }
2412 }
2413
2414 // Validate all buffers
2415 b = request->output_buffers;
2416 do {
2417 QCamera3ProcessingChannel *channel =
2418 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
2419 if (channel == NULL) {
2420 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
2421 frameNumber, (long)idx);
2422 return BAD_VALUE;
2423 }
2424 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
2425 LOGE("Request %d: Buffer %ld: Status not OK!",
2426 frameNumber, (long)idx);
2427 return BAD_VALUE;
2428 }
2429 if (b->release_fence != -1) {
2430 LOGE("Request %d: Buffer %ld: Has a release fence!",
2431 frameNumber, (long)idx);
2432 return BAD_VALUE;
2433 }
2434 if (b->buffer == NULL) {
2435 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
2436 frameNumber, (long)idx);
2437 return BAD_VALUE;
2438 }
2439 if (*(b->buffer) == NULL) {
2440 LOGE("Request %d: Buffer %ld: NULL private handle!",
2441 frameNumber, (long)idx);
2442 return BAD_VALUE;
2443 }
2444 idx++;
2445 b = request->output_buffers + idx;
2446 } while (idx < (ssize_t)request->num_output_buffers);
2447
2448 return NO_ERROR;
2449}
2450
2451/*===========================================================================
2452 * FUNCTION : deriveMinFrameDuration
2453 *
 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
2455 * on currently configured streams.
2456 *
2457 * PARAMETERS : NONE
2458 *
2459 * RETURN : NONE
2460 *
2461 *==========================================================================*/
2462void QCamera3HardwareInterface::deriveMinFrameDuration()
2463{
2464 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
2465
2466 maxJpegDim = 0;
2467 maxProcessedDim = 0;
2468 maxRawDim = 0;
2469
2470 // Figure out maximum jpeg, processed, and raw dimensions
2471 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
2472 it != mStreamInfo.end(); it++) {
2473
2474 // Input stream doesn't have valid stream_type
2475 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
2476 continue;
2477
2478 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
2479 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
2480 if (dimension > maxJpegDim)
2481 maxJpegDim = dimension;
2482 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2483 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2484 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
2485 if (dimension > maxRawDim)
2486 maxRawDim = dimension;
2487 } else {
2488 if (dimension > maxProcessedDim)
2489 maxProcessedDim = dimension;
2490 }
2491 }
2492
2493 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
2494 MAX_SIZES_CNT);
2495
2496 //Assume all jpeg dimensions are in processed dimensions.
2497 if (maxJpegDim > maxProcessedDim)
2498 maxProcessedDim = maxJpegDim;
2499 //Find the smallest raw dimension that is greater or equal to jpeg dimension
2500 if (maxProcessedDim > maxRawDim) {
2501 maxRawDim = INT32_MAX;
2502
2503 for (size_t i = 0; i < count; i++) {
2504 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
2505 gCamCapability[mCameraId]->raw_dim[i].height;
2506 if (dimension >= maxProcessedDim && dimension < maxRawDim)
2507 maxRawDim = dimension;
2508 }
2509 }
2510
2511 //Find minimum durations for processed, jpeg, and raw
2512 for (size_t i = 0; i < count; i++) {
2513 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
2514 gCamCapability[mCameraId]->raw_dim[i].height) {
2515 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
2516 break;
2517 }
2518 }
2519 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
2520 for (size_t i = 0; i < count; i++) {
2521 if (maxProcessedDim ==
2522 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
2523 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
2524 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2525 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
2526 break;
2527 }
2528 }
2529}
2530
2531/*===========================================================================
2532 * FUNCTION : getMinFrameDuration
2533 *
 * DESCRIPTION: get minimum frame duration based on the current maximum frame durations
2535 * and current request configuration.
2536 *
 * PARAMETERS : @request: request sent by the framework
2538 *
 * RETURN : min frame duration for a particular request
2540 *
2541 *==========================================================================*/
2542int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
2543{
2544 bool hasJpegStream = false;
2545 bool hasRawStream = false;
2546 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
2547 const camera3_stream_t *stream = request->output_buffers[i].stream;
2548 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
2549 hasJpegStream = true;
2550 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
2551 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
2552 stream->format == HAL_PIXEL_FORMAT_RAW16)
2553 hasRawStream = true;
2554 }
2555
2556 if (!hasJpegStream)
2557 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
2558 else
2559 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
2560}
2561
2562/*===========================================================================
2563 * FUNCTION : handleBuffersDuringFlushLock
2564 *
2565 * DESCRIPTION: Account for buffers returned from back-end during flush
2566 * This function is executed while mMutex is held by the caller.
2567 *
2568 * PARAMETERS :
2569 * @buffer: image buffer for the callback
2570 *
2571 * RETURN :
2572 *==========================================================================*/
2573void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
2574{
2575 bool buffer_found = false;
2576 for (List<PendingBuffersInRequest>::iterator req =
2577 mPendingBuffersMap.mPendingBuffersInRequest.begin();
2578 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
2579 for (List<PendingBufferInfo>::iterator i =
2580 req->mPendingBufferList.begin();
2581 i != req->mPendingBufferList.end(); i++) {
2582 if (i->buffer == buffer->buffer) {
2583 mPendingBuffersMap.numPendingBufsAtFlush--;
2584 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
2585 buffer->buffer, req->frame_number,
2586 mPendingBuffersMap.numPendingBufsAtFlush);
2587 buffer_found = true;
2588 break;
2589 }
2590 }
2591 if (buffer_found) {
2592 break;
2593 }
2594 }
2595 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
2596 //signal the flush()
2597 LOGD("All buffers returned to HAL. Continue flush");
2598 pthread_cond_signal(&mBuffersCond);
2599 }
2600}
2601
2602
/*===========================================================================
 * FUNCTION   : handlePendingReprocResults
 *
 * DESCRIPTION: check and notify on any pending reprocess results
 *
 * PARAMETERS :
 *   @frame_number   : Pending request frame number
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int32_t QCamera3HardwareInterface::handlePendingReprocResults(uint32_t frame_number)
{
    for (List<PendingReprocessResult>::iterator j = mPendingReprocessResultList.begin();
            j != mPendingReprocessResultList.end(); j++) {
        if (j->frame_number == frame_number) {
            // Deliver the deferred shutter notify that was stored alongside
            // this reprocess entry; camera3 requires notify() before the
            // final capture result.
            mCallbackOps->notify(mCallbackOps, &j->notify_msg);

            LOGD("Delayed reprocess notify %d",
                    frame_number);

            // Locate the matching pending request so the result can carry
            // its original input buffer and settings.
            for (pendingRequestIterator k = mPendingRequestsList.begin();
                    k != mPendingRequestsList.end(); k++) {

                if (k->frame_number == j->frame_number) {
                    LOGD("Found reprocess frame number %d in pending reprocess List "
                            "Take it out!!",
                            k->frame_number);

                    // Single-buffer reprocess result: the cached output
                    // buffer plus the request's input buffer and settings.
                    camera3_capture_result result;
                    memset(&result, 0, sizeof(camera3_capture_result));
                    result.frame_number = frame_number;
                    result.num_output_buffers = 1;
                    result.output_buffers = &j->buffer;
                    result.input_buffer = k->input_buffer;
                    result.result = k->settings;
                    // Reprocess results are complete in one callback.
                    result.partial_result = PARTIAL_RESULT_COUNT;
                    mCallbackOps->process_capture_result(mCallbackOps, &result);

                    erasePendingRequest(k);
                    break;
                }
            }
            // Entry fully delivered; drop it and stop scanning.
            mPendingReprocessResultList.erase(j);
            break;
        }
    }
    return NO_ERROR;
}
2653
/*===========================================================================
 * FUNCTION   : handleBatchMetadata
 *
 * DESCRIPTION: Handles metadata buffer callback in batch (HFR) mode. A single
 *              batch metadata buffer is expanded into one metadata callback
 *              per request in the batch, with interpolated frame numbers and
 *              timestamps.
 *
 * PARAMETERS : @metadata_buf: metadata buffer
 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
 *                 the meta buf in this method
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBatchMetadata(
        mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
{
    ATRACE_CALL();

    if (NULL == metadata_buf) {
        LOGE("metadata_buf is NULL");
        return;
    }
    /* In batch mode, the metdata will contain the frame number and timestamp of
     * the last frame in the batch. Eg: a batch containing buffers from request
     * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetata =>
     * multiple process_capture_results */
    metadata_buffer_t *metadata =
            (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
    uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
    uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
    uint32_t frame_number = 0, urgent_frame_number = 0;
    int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
    bool invalid_metadata = false;
    size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
    size_t loopCount = 1;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time =
            POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
            (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
            (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        // Do not return: handleMetadataWithLock is still invoked below
        // (loopCount stays 1) so per-request pipeline-depth book-keeping
        // remains consistent.
        invalid_metadata = true;
    } else {
        frame_number_valid = *p_frame_number_valid;
        last_frame_number = *p_frame_number;
        last_frame_capture_time = *p_capture_time;
        urgent_frame_number_valid = *p_urgent_frame_number_valid;
        last_urgent_frame_number = *p_urgent_frame_number;
    }

    /* In batchmode, when no video buffers are requested, set_parms are sent
     * for every capture_request. The difference between consecutive urgent
     * frame numbers and frame numbers should be used to interpolate the
     * corresponding frame numbers and time stamps */
    pthread_mutex_lock(&mMutex);
    if (urgent_frame_number_valid) {
        // Map the batch's last urgent frame number back to its first request;
        // the span is the number of urgent results to synthesize.
        first_urgent_frame_number =
                mPendingBatchMap.valueFor(last_urgent_frame_number);
        urgentFrameNumDiff = last_urgent_frame_number + 1 -
                first_urgent_frame_number;

        LOGD("urgent_frm: valid: %d frm_num: %d - %d",
                 urgent_frame_number_valid,
                first_urgent_frame_number, last_urgent_frame_number);
    }

    if (frame_number_valid) {
        first_frame_number = mPendingBatchMap.valueFor(last_frame_number);
        frameNumDiff = last_frame_number + 1 -
                first_frame_number;
        // This batch is fully accounted for; drop its mapping.
        mPendingBatchMap.removeItem(last_frame_number);

        LOGD("frm: valid: %d frm_num: %d - %d",
                 frame_number_valid,
                first_frame_number, last_frame_number);

    }
    pthread_mutex_unlock(&mMutex);

    if (urgent_frame_number_valid || frame_number_valid) {
        // One iteration per request in the (larger of the two) batch spans.
        loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
        if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
                    urgentFrameNumDiff, last_urgent_frame_number);
        if (frameNumDiff > MAX_HFR_BATCH_SIZE)
            LOGE("frameNumDiff: %d frameNum: %d",
                    frameNumDiff, last_frame_number);
    }

    for (size_t i = 0; i < loopCount; i++) {
        /* handleMetadataWithLock is called even for invalid_metadata for
         * pipeline depth calculation */
        if (!invalid_metadata) {
            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (urgent_frame_number_valid) {
                if (i < urgentFrameNumDiff) {
                    urgent_frame_number =
                            first_urgent_frame_number + i;
                    LOGD("inferred urgent frame_number: %d",
                            urgent_frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff < frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
                }
            }

            /* Infer frame number. Batch metadata contains frame number of the
             * last frame */
            if (frame_number_valid) {
                if (i < frameNumDiff) {
                    frame_number = first_frame_number + i;
                    LOGD("inferred frame_number: %d", frame_number);
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER, frame_number);
                } else {
                    /* This is to handle when urgentFrameNumDiff > frameNumDiff */
                    ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                            CAM_INTF_META_FRAME_NUMBER_VALID, 0);
                }
            }

            if (last_frame_capture_time) {
                //Infer timestamp: the batch timestamp belongs to the LAST
                //frame, so step back (loopCount - 1) frame periods at the HFR
                //video rate, then forward i periods for this iteration.
                first_frame_capture_time = last_frame_capture_time -
                        (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
                capture_time =
                        first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
                ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
                        CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
                LOGD("batch capture_time: %lld, capture_time: %lld",
                        last_frame_capture_time, capture_time);
            }
        }
        pthread_mutex_lock(&mMutex);
        // Never let the inner call free the buffer: it is reused (mutated in
        // place) for every iteration and released once, below.
        handleMetadataWithLock(metadata_buf,
                false /* free_and_bufdone_meta_buf */,
                (i == 0) /* first metadata in the batch metadata */);
        pthread_mutex_unlock(&mMutex);
    }

    /* BufDone metadata buffer */
    if (free_and_bufdone_meta_buf) {
        mMetadataChannel->bufDone(metadata_buf);
        free(metadata_buf);
    }
}
2815
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002816void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
2817 camera3_error_msg_code_t errorCode)
2818{
2819 camera3_notify_msg_t notify_msg;
2820 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
2821 notify_msg.type = CAMERA3_MSG_ERROR;
2822 notify_msg.message.error.error_code = errorCode;
2823 notify_msg.message.error.error_stream = NULL;
2824 notify_msg.message.error.frame_number = frameNumber;
2825 mCallbackOps->notify(mCallbackOps, &notify_msg);
2826
2827 return;
2828}
/*===========================================================================
 * FUNCTION   : handleMetadataWithLock
 *
 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
 *
 * PARAMETERS : @metadata_buf: metadata buffer
 *              @free_and_bufdone_meta_buf: Buf done on the meta buf and free
 *                 the meta buf in this method
 *              @firstMetadataInBatch: Boolean to indicate whether this is the
 *                  first metadata in a batch. Valid only for batch mode
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleMetadataWithLock(
    mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
    bool firstMetadataInBatch)
{
    ATRACE_CALL();
    if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
        //during flush do not send metadata from this thread
        LOGD("not sending metadata during flush or when mState is error");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        return;
    }

    //not in flush
    metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
    int32_t frame_number_valid, urgent_frame_number_valid;
    uint32_t frame_number, urgent_frame_number;
    int64_t capture_time;
    nsecs_t currentSysTime;

    int32_t *p_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
    int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
    int32_t *p_urgent_frame_number_valid =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
    uint32_t *p_urgent_frame_number =
            POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
    // p_cam_frame_drop stays in scope for the per-request loop below; it is
    // non-NULL only when the back-end reported dropped stream buffers.
    IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
            metadata) {
        LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
             *p_frame_number_valid, *p_frame_number);
    }

    if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
            (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
        LOGE("Invalid metadata");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        // Still fall through to the pipeline-depth update at done_metadata.
        goto done_metadata;
    }
    frame_number_valid = *p_frame_number_valid;
    frame_number = *p_frame_number;
    capture_time = *p_capture_time;
    urgent_frame_number_valid = *p_urgent_frame_number_valid;
    urgent_frame_number = *p_urgent_frame_number;
    currentSysTime = systemTime(CLOCK_MONOTONIC);

    // Detect if buffers from any requests are overdue
    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        if ( (currentSysTime - req.timestamp) >
            s2ns(MISSING_REQUEST_BUF_TIMEOUT) ) {
            for (auto &missed : req.mPendingBufferList) {
                LOGE("Current frame: %d. Missing: frame = %d, buffer = %p,"
                    "stream type = %d, stream format = %d",
                    frame_number, req.frame_number, missed.buffer,
                    missed.stream->stream_type, missed.stream->format);
            }
        }
    }
    //Partial result on process_capture_result for timestamp
    if (urgent_frame_number_valid) {
        LOGD("valid urgent frame_number = %u, capture_time = %lld",
             urgent_frame_number, capture_time);

        //Recieved an urgent Frame Number, handle it
        //using partial results
        for (pendingRequestIterator i =
                mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
            LOGD("Iterator Frame = %d urgent frame = %d",
                 i->frame_number, urgent_frame_number);

            // Older non-reprocess requests with no partial result yet mean an
            // urgent metadata callback was missed for them; log only.
            if ((!i->input_buffer) && (i->frame_number < urgent_frame_number) &&
                (i->partial_result_cnt == 0)) {
                LOGE("Error: HAL missed urgent metadata for frame number %d",
                     i->frame_number);
            }

            // Deliver the 3A (urgent) partial result exactly once per request.
            if (i->frame_number == urgent_frame_number &&
                     i->bUrgentReceived == 0) {

                camera3_capture_result_t result;
                memset(&result, 0, sizeof(camera3_capture_result_t));

                i->partial_result_cnt++;
                i->bUrgentReceived = 1;
                // Extract 3A metadata
                result.result =
                    translateCbUrgentMetadataToResultMetadata(metadata);
                // Populate metadata result
                result.frame_number = urgent_frame_number;
                result.num_output_buffers = 0;
                result.output_buffers = NULL;
                result.partial_result = i->partial_result_cnt;

                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("urgent frame_number = %u, capture_time = %lld",
                      result.frame_number, capture_time);
                // Ownership of the translated metadata stays here.
                free_camera_metadata((camera_metadata_t *)result.result);
                break;
            }
        }
    }

    if (!frame_number_valid) {
        LOGD("Not a valid normal frame number, used as SOF only");
        if (free_and_bufdone_meta_buf) {
            mMetadataChannel->bufDone(metadata_buf);
            free(metadata_buf);
        }
        goto done_metadata;
    }
    LOGH("valid frame_number = %u, capture_time = %lld",
            frame_number, capture_time);

    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() && i->frame_number <= frame_number;) {
        // Flush out all entries with less or equal frame numbers.

        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));

        LOGD("frame_number in the list is %u", i->frame_number);
        i->partial_result_cnt++;
        result.partial_result = i->partial_result_cnt;

        // Check whether any stream buffer corresponding to this is dropped or not
        // If dropped, then send the ERROR_BUFFER for the corresponding stream
        if (p_cam_frame_drop) {
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel *)j->stream->priv;
                uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
                    if (streamID == p_cam_frame_drop->streamID[k]) {
                        // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
                        LOGE("Start of reporting error frame#=%u, streamID=%u",
                                 i->frame_number, streamID);
                        notify_msg.type = CAMERA3_MSG_ERROR;
                        notify_msg.message.error.frame_number = i->frame_number;
                        notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
                        notify_msg.message.error.error_stream = j->stream;
                        mCallbackOps->notify(mCallbackOps, &notify_msg);
                        LOGE("End of reporting error frame#=%u, streamID=%u",
                                i->frame_number, streamID);
                        // Remember the drop so the buffer is later returned
                        // with CAMERA3_BUFFER_STATUS_ERROR.
                        PendingFrameDropInfo PendingFrameDrop;
                        PendingFrameDrop.frame_number=i->frame_number;
                        PendingFrameDrop.stream_ID = streamID;
                        // Add the Frame drop info to mPendingFrameDropList
                        mPendingFrameDropList.push_back(PendingFrameDrop);
                   }
                }
            }
        }

        // Send empty metadata with already filled buffers for dropped metadata
        // and send valid metadata with already filled buffers for current metadata
        /* we could hit this case when we either
         * 1. have a pending reprocess request or
         * 2. miss a metadata buffer callback */
        if (i->frame_number < frame_number) {
            if (i->input_buffer) {
                /* this will be handled in handleInputBufferWithLock */
                i++;
                continue;
            } else if (mBatchSize) {
                // Batch mode: metadata for intermediate requests is expected
                // to be sparse; report ERROR_RESULT with a dummy result.
                mPendingLiveRequest--;

                CameraMetadata dummyMetadata;
                dummyMetadata.update(ANDROID_REQUEST_ID, &(i->request_id), 1);
                result.result = dummyMetadata.release();

                notifyError(i->frame_number, CAMERA3_MSG_ERROR_RESULT);
            } else {
                // Non-batch mode: a missing metadata buffer is fatal.
                LOGE("Fatal: Missing metadata buffer for frame number %d", i->frame_number);
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
                mState = ERROR;
                goto done_metadata;
            }
        } else {
            mPendingLiveRequest--;
            /* Clear notify_msg structure */
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));

            // Send shutter notify to frameworks
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = i->frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);

            i->timestamp = capture_time;

            // Find channel requiring metadata, meaning internal offline postprocess
            // is needed.
            //TODO: for now, we don't support two streams requiring metadata at the same time.
            // (because we are not making copies, and metadata buffer is not reference counted.
            bool internalPproc = false;
            for (pendingBufferIterator iter = i->buffers.begin();
                    iter != i->buffers.end(); iter++) {
                if (iter->need_metadata) {
                    internalPproc = true;
                    QCamera3ProcessingChannel *channel =
                            (QCamera3ProcessingChannel *)iter->stream->priv;
                    // Ownership of metadata_buf passes to the channel; do NOT
                    // bufDone/free it below in this case.
                    channel->queueReprocMetadata(metadata_buf);
                    break;
                }
            }

            result.result = translateFromHalMetadata(metadata,
                    i->timestamp, i->request_id, i->jpegMetadata, i->pipeline_depth,
                    i->capture_intent, internalPproc, i->fwkCacMode,
                    firstMetadataInBatch);

            saveExifParams(metadata);

            if (i->blob_request) {
                {
                    //Dump tuning metadata if enabled and available
                    char prop[PROPERTY_VALUE_MAX];
                    memset(prop, 0, sizeof(prop));
                    property_get("persist.camera.dumpmetadata", prop, "0");
                    int32_t enabled = atoi(prop);
                    if (enabled && metadata->is_tuning_params_valid) {
                        dumpMetadataToFile(metadata->tuning_params,
                               mMetaFrameCount,
                               enabled,
                               "Snapshot",
                               frame_number);
                    }
                }
            }

            if (!internalPproc) {
                LOGD("couldn't find need_metadata for this metadata");
                // Return metadata buffer
                if (free_and_bufdone_meta_buf) {
                    mMetadataChannel->bufDone(metadata_buf);
                    free(metadata_buf);
                }
            }
        }
        if (!result.result) {
            LOGE("metadata is NULL");
        }
        result.frame_number = i->frame_number;
        result.input_buffer = i->input_buffer;
        result.num_output_buffers = 0;
        result.output_buffers = NULL;
        // Count the buffers that were cached while waiting for this metadata.
        for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
            if (j->buffer) {
                result.num_output_buffers++;
            }
        }

        updateFpsInPreviewBuffer(metadata, i->frame_number);

        if (result.num_output_buffers > 0) {
            camera3_stream_buffer_t *result_buffers =
                new camera3_stream_buffer_t[result.num_output_buffers];
            if (result_buffers != NULL) {
                size_t result_buffers_idx = 0;
                for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                        j != i->buffers.end(); j++) {
                    if (j->buffer) {
                        // Mark the buffer as errored if the back-end reported
                        // this frame dropped for its stream.
                        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                                m != mPendingFrameDropList.end(); m++) {
                            QCamera3Channel *channel = (QCamera3Channel *)j->buffer->stream->priv;
                            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
                            if((m->stream_ID == streamID) && (m->frame_number==frame_number)) {
                                j->buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                                LOGE("Stream STATUS_ERROR frame_number=%u, streamID=%u",
                                        frame_number, streamID);
                                m = mPendingFrameDropList.erase(m);
                                break;
                            }
                        }
                        mPendingBuffersMap.removeBuf(j->buffer->buffer);
                        result_buffers[result_buffers_idx++] = *(j->buffer);
                        // The cached copy was malloc'd in handleBufferWithLock.
                        free(j->buffer);
                        j->buffer = NULL;
                    }
                }
                result.output_buffers = result_buffers;
                mCallbackOps->process_capture_result(mCallbackOps, &result);
                LOGD("meta frame_number = %u, capture_time = %lld",
                        result.frame_number, i->timestamp);
                free_camera_metadata((camera_metadata_t *)result.result);
                delete[] result_buffers;
            }else {
                LOGE("Fatal error: out of memory");
            }
        } else {
            // Metadata-only result; buffers will follow later.
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("meta frame_number = %u, capture_time = %lld",
                    result.frame_number, i->timestamp);
            free_camera_metadata((camera_metadata_t *)result.result);
        }

        i = erasePendingRequest(i);

        if (!mPendingReprocessResultList.empty()) {
            handlePendingReprocResults(frame_number + 1);
        }
    }

done_metadata:
    // Every still-pending request has moved one stage deeper in the pipeline.
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end() ;i++) {
        i->pipeline_depth++;
    }
    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
    unblockRequestIfNecessary();
}
3169
3170/*===========================================================================
3171 * FUNCTION : hdrPlusPerfLock
3172 *
3173 * DESCRIPTION: perf lock for HDR+ using custom intent
3174 *
3175 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
3176 *
3177 * RETURN : None
3178 *
3179 *==========================================================================*/
3180void QCamera3HardwareInterface::hdrPlusPerfLock(
3181 mm_camera_super_buf_t *metadata_buf)
3182{
3183 if (NULL == metadata_buf) {
3184 LOGE("metadata_buf is NULL");
3185 return;
3186 }
3187 metadata_buffer_t *metadata =
3188 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3189 int32_t *p_frame_number_valid =
3190 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3191 uint32_t *p_frame_number =
3192 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3193
3194 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
3195 LOGE("%s: Invalid metadata", __func__);
3196 return;
3197 }
3198
3199 //acquire perf lock for 5 sec after the last HDR frame is captured
3200 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
3201 if ((p_frame_number != NULL) &&
3202 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
3203 m_perfLock.lock_acq_timed(HDR_PLUS_PERF_TIME_OUT);
3204 }
3205 }
3206
3207 //release lock after perf lock timer is expired. If lock is already released,
3208 //isTimerReset returns false
3209 if (m_perfLock.isTimerReset()) {
3210 mLastCustIntentFrmNum = -1;
3211 m_perfLock.lock_rel_timed();
3212 }
3213}
3214
/*===========================================================================
 * FUNCTION   : handleInputBufferWithLock
 *
 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
 *
 * PARAMETERS : @frame_number: frame number of the input buffer
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
{
    ATRACE_CALL();
    // Locate the pending request carrying this frame number.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i != mPendingRequestsList.end() && i->input_buffer) {
        //found the right request
        if (!i->shutter_notified) {
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            // Prefer the sensor timestamp from the request's input settings;
            // fall back to the current monotonic time if absent.
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGE("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            // Shutter notify must precede the capture result (camera3
            // callback ordering); mark it so it is sent at most once.
            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            i->shutter_notified = true;
            LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
                        i->frame_number, notify_msg.message.shutter.timestamp);
        }

        // Wait on (and close) the input buffer's release fence before handing
        // the buffer back to the framework.
        if (i->input_buffer->release_fence != -1) {
           int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
           close(i->input_buffer->release_fence);
           if (rc != OK) {
               LOGE("input buffer sync wait failed %d", rc);
           }
        }

        // Return the input buffer along with the request settings; the result
        // is complete in this single callback (PARTIAL_RESULT_COUNT).
        camera3_capture_result result;
        memset(&result, 0, sizeof(camera3_capture_result));
        result.frame_number = frame_number;
        result.result = i->settings;
        result.input_buffer = i->input_buffer;
        result.partial_result = PARTIAL_RESULT_COUNT;

        mCallbackOps->process_capture_result(mCallbackOps, &result);
        LOGD("Input request metadata and input buffer frame_number = %u",
                        i->frame_number);
        i = erasePendingRequest(i);
    } else {
        LOGE("Could not find input request for frame number %d", frame_number);
    }
}
3282
/*===========================================================================
 * FUNCTION   : handleBufferWithLock
 *
 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
 *
 * PARAMETERS : @buffer: image buffer for the callback
 *              @frame_number: frame number of the image buffer
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::handleBufferWithLock(
    camera3_stream_buffer_t *buffer, uint32_t frame_number)
{
    ATRACE_CALL();
    /* Nothing to be done during error state */
    if ((ERROR == mState) || (DEINIT == mState)) {
        return;
    }
    if (mFlushPerf) {
        // During flush, only account for the returned buffer; flush() itself
        // reports results to the framework.
        handleBuffersDuringFlushLock(buffer);
        return;
    }
    //not in flush
    // If the frame number doesn't exist in the pending request list,
    // directly send the buffer to the frameworks, and update pending buffers map
    // Otherwise, book-keep the buffer.
    pendingRequestIterator i = mPendingRequestsList.begin();
    while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
        i++;
    }
    if (i == mPendingRequestsList.end()) {
        // Request entry is no longer pending (presumably its metadata result
        // was already delivered); send a buffer-only result with no metadata.
        // Verify all pending requests frame_numbers are greater
        for (pendingRequestIterator j = mPendingRequestsList.begin();
                j != mPendingRequestsList.end(); j++) {
            if ((j->frame_number < frame_number) && !(j->input_buffer)) {
                LOGW("Error: pending live frame number %d is smaller than %d",
                         j->frame_number, frame_number);
            }
        }
        camera3_capture_result_t result;
        memset(&result, 0, sizeof(camera3_capture_result_t));
        result.result = NULL;
        result.frame_number = frame_number;
        result.num_output_buffers = 1;
        // partial_result 0: this callback carries no metadata.
        result.partial_result = 0;
        // If the back-end reported this frame dropped on this stream, return
        // the buffer with CAMERA3_BUFFER_STATUS_ERROR.
        for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
                m != mPendingFrameDropList.end(); m++) {
            QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
            uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
            if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
                buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
                LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
                         frame_number, streamID);
                m = mPendingFrameDropList.erase(m);
                break;
            }
        }
        result.output_buffers = buffer;
        LOGH("result frame_number = %d, buffer = %p",
                 frame_number, buffer->buffer);

        mPendingBuffersMap.removeBuf(buffer->buffer);

        mCallbackOps->process_capture_result(mCallbackOps, &result);
    } else {
        if (i->input_buffer) {
            // Reprocess request: the output buffer completes the request, so
            // deliver shutter notify and the full result immediately.
            CameraMetadata settings;
            camera3_notify_msg_t notify_msg;
            memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
            nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
            // Prefer the sensor timestamp carried in the input settings.
            if(i->settings) {
                settings = i->settings;
                if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
                    capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
                } else {
                    LOGW("No timestamp in input settings! Using current one.");
                }
            } else {
                LOGE("Input settings missing!");
            }

            notify_msg.type = CAMERA3_MSG_SHUTTER;
            notify_msg.message.shutter.frame_number = frame_number;
            notify_msg.message.shutter.timestamp = (uint64_t)capture_time;

            // Wait on (and close) the input buffer's release fence before
            // returning it with the result.
            if (i->input_buffer->release_fence != -1) {
               int32_t rc = sync_wait(i->input_buffer->release_fence, TIMEOUT_NEVER);
               close(i->input_buffer->release_fence);
               if (rc != OK) {
                   LOGE("input buffer sync wait failed %d", rc);
               }
            }
            mPendingBuffersMap.removeBuf(buffer->buffer);

            camera3_capture_result result;
            memset(&result, 0, sizeof(camera3_capture_result));
            result.frame_number = frame_number;
            result.result = i->settings;
            result.input_buffer = i->input_buffer;
            result.num_output_buffers = 1;
            result.output_buffers = buffer;
            result.partial_result = PARTIAL_RESULT_COUNT;

            // notify() must precede the final capture result.
            mCallbackOps->notify(mCallbackOps, &notify_msg);
            mCallbackOps->process_capture_result(mCallbackOps, &result);
            LOGD("Notify reprocess now %d!", frame_number);
            i = erasePendingRequest(i);
        } else {
            // Metadata not delivered yet: cache a heap copy of the buffer in
            // the request entry; handleMetadataWithLock returns it (and frees
            // the copy) together with the metadata result.
            for (List<RequestedBufferInfo>::iterator j = i->buffers.begin();
                    j != i->buffers.end(); j++) {
                if (j->stream == buffer->stream) {
                    if (j->buffer != NULL) {
                        LOGE("Error: buffer is already set");
                    } else {
                        j->buffer = (camera3_stream_buffer_t *)malloc(
                            sizeof(camera3_stream_buffer_t));
                        *(j->buffer) = *buffer;
                        LOGH("cache buffer %p at result frame_number %u",
                             buffer->buffer, frame_number);
                    }
                }
            }
        }
    }
}
3409
/*===========================================================================
 * FUNCTION   : unblockRequestIfNecessary
 *
 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
 *              that mMutex is held when this function is called.
 *
 * PARAMETERS :
 *
 * RETURN     :
 *
 *==========================================================================*/
void QCamera3HardwareInterface::unblockRequestIfNecessary()
{
    // Unblock process_capture_request.
    // The signal is sent unconditionally; any thread waiting on mRequestCond
    // is expected to re-check its own wait predicate after waking, so a
    // wake-up with nothing to do is harmless.
    pthread_cond_signal(&mRequestCond);
}
3426
3427
/*===========================================================================
 * FUNCTION   : processCaptureRequest
 *
 * DESCRIPTION: process a capture request from camera service. On the first
 *              request after a stream configuration (mState == CONFIGURED)
 *              this also: un-configures the backend (if previously
 *              configured), derives EIS/IS types from setprops and request
 *              settings, pushes session parameters (stream info, tintless,
 *              CDS, fps range, scene mode, video HDR, dual-cam link), then
 *              initializes and starts all channels. For every request it
 *              waits on acquire fences, records the request in the pending
 *              request/buffer tracking structures, issues buffer requests to
 *              each channel, and finally blocks until the number of pending
 *              live requests drops below the in-flight minimum (with a 5 s
 *              timeout when the realtime clock is readable).
 *
 * PARAMETERS :
 *   @request : request from framework to process
 *
 * RETURN     : NO_ERROR on success; -ENODEV on bad state or wait timeout;
 *              other negative/error codes on validation or channel failures.
 *
 *==========================================================================*/
int QCamera3HardwareInterface::processCaptureRequest(
                    camera3_capture_request_t *request)
{
    ATRACE_CALL();
    int rc = NO_ERROR;
    int32_t request_id;
    CameraMetadata meta;
    bool isVidBufRequested = false;
    camera3_stream_buffer_t *pInputBuffer = NULL;

    pthread_mutex_lock(&mMutex);

    // Validate current state. Only CONFIGURED (first request after
    // configure_streams) and STARTED (steady state) may accept requests.
    switch (mState) {
        case CONFIGURED:
        case STARTED:
            /* valid state */
            break;

        case ERROR:
            // Must drop mMutex before notifying the framework of the error.
            pthread_mutex_unlock(&mMutex);
            handleCameraDeviceError();
            return -ENODEV;

        default:
            LOGE("Invalid state %d", mState);
            pthread_mutex_unlock(&mMutex);
            return -ENODEV;
    }

    rc = validateCaptureRequest(request);
    if (rc != NO_ERROR) {
        LOGE("incoming request is not valid");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    meta = request->settings;

    // For first capture request, send capture intent, and
    // stream on all streams
    if (mState == CONFIGURED) {
        // send an unconfigure to the backend so that the isp
        // resources are deallocated
        if (!mFirstConfiguration) {
            cam_stream_size_info_t stream_config_info;
            int32_t hal_version = CAM_HAL_V3;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers =
                    MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                    CAM_INTF_PARM_HAL_VERSION, hal_version);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                    CAM_INTF_META_STREAM_INFO, stream_config_info);
            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
                    mParameters);
            if (rc < 0) {
                LOGE("set_parms for unconfigure failed");
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        // Perf lock is held for the whole (expensive) stream-on sequence and
        // released on both the error_exit and no_error paths below.
        m_perfLock.lock_acq();
        /* get eis information for stream configuration */
        cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
        char is_type_value[PROPERTY_VALUE_MAX];
        property_get("persist.camera.is_type", is_type_value, "4");
        isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
        // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
        property_get("persist.camera.is_type_preview", is_type_value, "4");
        isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
        LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);

        if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
            int32_t hal_version = CAM_HAL_V3;
            uint8_t captureIntent =
                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
            mCaptureIntent = captureIntent;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
        }

        uint8_t fwkVideoStabMode=0;
        if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
            fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
        }

        // If EIS setprop is enabled & if first capture setting has EIS enabled then only
        // turn it on for video/preview
        bool setEis = m_bEisEnable && fwkVideoStabMode && m_bEisSupportedSize &&
                (isTypeVideo >= IS_TYPE_EIS_2_0);
        int32_t vsMode;
        vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
            rc = BAD_VALUE;
        }
        LOGD("setEis %d", setEis);
        bool eis3Supported = false;
        size_t count = IS_TYPE_MAX;
        count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
        for (size_t i = 0; i < count; i++) {
            if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
                eis3Supported = true;
                break;
            }
        }

        //IS type will be 0 unless EIS is supported. If EIS is supported
        //it could either be 4 or 5 depending on the stream and video size
        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
            if (setEis) {
                if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
                    is_type = isTypePreview;
                } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
                    // Fall back to EIS 2.0 when 3.0 was requested but the
                    // sensor capability list does not advertise it.
                    if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
                        LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
                        is_type = IS_TYPE_EIS_2_0;
                    } else {
                        is_type = isTypeVideo;
                    }
                } else {
                    is_type = IS_TYPE_NONE;
                }
                mStreamConfigInfo.is_type[i] = is_type;
            } else {
                mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
            }
        }

        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);

        int32_t tintless_value = 1;
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_PARM_TINTLESS, tintless_value);
        //Disable CDS for HFR mode or if DIS/EIS is on.
        //CDS is a session parameter in the backend/ISP, so need to be set/reset
        //after every configure_stream
        if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
                (m_bIsVideo)) {
            int32_t cds = CAM_CDS_MODE_OFF;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                    CAM_INTF_PARM_CDS_MODE, cds))
                LOGE("Failed to disable CDS for HFR mode");

        }

        // AV timer can come either from the debug setprop (m_debug_avtimer)
        // or from the vendor tag in the request settings; the setprop wins.
        if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
            uint8_t* use_av_timer = NULL;

            if (m_debug_avtimer){
                use_av_timer = &m_debug_avtimer;
            }
            else{
                use_av_timer =
                    meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
            }

            if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
                rc = BAD_VALUE;
            }
        }

        setMobicat();

        /* Set fps and hfr mode while sending meta stream info so that sensor
         * can configure appropriate streaming mode */
        mHFRVideoFps = DEFAULT_VIDEO_FPS;
        mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
        mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
        if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
            rc = setHalFpsRange(meta, mParameters);
            if (rc == NO_ERROR) {
                int32_t max_fps =
                    (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
                if (max_fps == 60) {
                    mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
                }
                /* For HFR, more buffers are dequeued upfront to improve the performance */
                if (mBatchSize) {
                    mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
                    mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
                }
            }
            else {
                // Non-fatal: keep going with default in-flight limits.
                LOGE("setHalFpsRange failed");
            }
        }
        if (meta.exists(ANDROID_CONTROL_MODE)) {
            uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
            rc = extractSceneMode(meta, metaMode, mParameters);
            if (rc != NO_ERROR) {
                LOGE("extractSceneMode failed");
            }
        }

        if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
            cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
                    meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
            rc = setVideoHdrMode(mParameters, vhdr);
            if (rc != NO_ERROR) {
                LOGE("setVideoHDR is failed");
            }
        }

        //TODO: validate the arguments, HSV scenemode should have only the
        //advertised fps ranges

        /*set the capture intent, hal version, tintless, stream info,
         *and disenable parameters to the backend*/
        LOGD("set_parms META_STREAM_INFO " );
        for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
            LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
                    "Format:%d is_type: %d",
                    mStreamConfigInfo.type[i],
                    mStreamConfigInfo.stream_sizes[i].width,
                    mStreamConfigInfo.stream_sizes[i].height,
                    mStreamConfigInfo.postprocess_mask[i],
                    mStreamConfigInfo.format[i],
                    mStreamConfigInfo.is_type[i]);
        }

        rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
                    mParameters);
        if (rc < 0) {
            // Non-fatal here; subsequent getSensorOutputSize/init will catch
            // a truly broken session.
            LOGE("set_parms failed for hal version, stream info");
        }

        cam_dimension_t sensor_dim;
        memset(&sensor_dim, 0, sizeof(sensor_dim));
        rc = getSensorOutputSize(sensor_dim);
        if (rc != NO_ERROR) {
            LOGE("Failed to get sensor output size");
            pthread_mutex_unlock(&mMutex);
            goto error_exit;
        }

        mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
                gCamCapability[mCameraId]->active_array_size.height,
                sensor_dim.width, sensor_dim.height);

        /* Set batchmode before initializing channel. Since registerBuffer
         * internally initializes some of the channels, better set batchmode
         * even before first register buffer */
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
                    && mBatchSize) {
                rc = channel->setBatchSize(mBatchSize);
                //Disable per frame map unmap for HFR/batchmode case
                rc |= channel->setPerFrameMapUnmap(false);
                if (NO_ERROR != rc) {
                    LOGE("Channel init failed %d", rc);
                    pthread_mutex_unlock(&mMutex);
                    goto error_exit;
                }
            }
        }

        //First initialize all streams
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            // Video/preview channels get the per-stream IS type chosen above
            // when EIS is on; everything else initializes with IS_TYPE_NONE.
            if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
               ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
               setEis) {
                for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
                    if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
                        is_type = mStreamConfigInfo.is_type[i];
                        break;
                    }
                }
                rc = channel->initialize(is_type);
            } else {
                rc = channel->initialize(IS_TYPE_NONE);
            }
            if (NO_ERROR != rc) {
                LOGE("Channel initialization failed %d", rc);
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        if (mRawDumpChannel) {
            rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
            if (rc != NO_ERROR) {
                LOGE("Error: Raw Dump Channel init failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        if (mSupportChannel) {
            rc = mSupportChannel->initialize(IS_TYPE_NONE);
            if (rc < 0) {
                LOGE("Support channel initialization failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        if (mAnalysisChannel) {
            rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
            if (rc < 0) {
                LOGE("Analysis channel initialization failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        if (mDummyBatchChannel) {
            rc = mDummyBatchChannel->setBatchSize(mBatchSize);
            if (rc < 0) {
                LOGE("mDummyBatchChannel setBatchSize failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
            rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
            if (rc < 0) {
                LOGE("mDummyBatchChannel initialization failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        // Set bundle info
        rc = setBundleInfo();
        if (rc < 0) {
            LOGE("setBundleInfo failed %d", rc);
            pthread_mutex_unlock(&mMutex);
            goto error_exit;
        }

        //update settings from app here
        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
            mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
            LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
        }
        if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
            mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
            LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
        }
        if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
            mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
            LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);

            // NOTE(review): with '&&' this only rejects ids >= MAX that also
            // differ from mCameraId; a self-link (mLinkedCameraId ==
            // mCameraId, in range) passes. Confirm '||' was not intended.
            if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
                (mLinkedCameraId != mCameraId) ) {
                LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
                    mLinkedCameraId, mCameraId);
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        // add bundle related cameras
        LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
        if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
            if (mIsDeviceLinked)
                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
            else
                m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;

            // gCamLock guards the process-wide sessionId[] table.
            pthread_mutex_lock(&gCamLock);

            if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
                LOGE("Dualcam: Invalid Session Id ");
                pthread_mutex_unlock(&gCamLock);
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }

            if (mIsMainCamera == 1) {
                m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
                m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
                // related session id should be session id of linked session
                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
            } else {
                m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
                m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
                m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
            }
            pthread_mutex_unlock(&gCamLock);

            rc = mCameraHandle->ops->sync_related_sensors(
                    mCameraHandle->camera_handle, m_pRelCamSyncBuf);
            if (rc < 0) {
                LOGE("Dualcam: link failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        //Then start them.
        LOGH("Start META Channel");
        rc = mMetadataChannel->start();
        if (rc < 0) {
            LOGE("META channel start failed");
            pthread_mutex_unlock(&mMutex);
            goto error_exit;
        }

        if (mAnalysisChannel) {
            rc = mAnalysisChannel->start();
            if (rc < 0) {
                LOGE("Analysis channel start failed");
                // Roll back the already-started metadata channel.
                mMetadataChannel->stop();
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        if (mSupportChannel) {
            rc = mSupportChannel->start();
            if (rc < 0) {
                LOGE("Support channel start failed");
                mMetadataChannel->stop();
                /* Although support and analysis are mutually exclusive today
                   adding it in anycase for future proofing */
                if (mAnalysisChannel) {
                    mAnalysisChannel->stop();
                }
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }
        for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
            QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
            LOGH("Start Processing Channel mask=%d",
                    channel->getStreamTypeMask());
            rc = channel->start();
            if (rc < 0) {
                LOGE("channel start failed");
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        if (mRawDumpChannel) {
            LOGD("Starting raw dump stream");
            rc = mRawDumpChannel->start();
            if (rc != NO_ERROR) {
                LOGE("Error Starting Raw Dump Channel");
                // Unwind everything started above before bailing out.
                for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
                      it != mStreamInfo.end(); it++) {
                    QCamera3Channel *channel =
                        (QCamera3Channel *)(*it)->stream->priv;
                    LOGH("Stopping Processing Channel mask=%d",
                        channel->getStreamTypeMask());
                    channel->stop();
                }
                if (mSupportChannel)
                    mSupportChannel->stop();
                if (mAnalysisChannel) {
                    mAnalysisChannel->stop();
                }
                mMetadataChannel->stop();
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        if (mChannelHandle) {

            rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
                    mChannelHandle);
            if (rc != NO_ERROR) {
                LOGE("start_channel failed %d", rc);
                pthread_mutex_unlock(&mMutex);
                goto error_exit;
            }
        }

        // Both exits release the perf lock acquired at the top of this
        // first-request block; error_exit returns with mMutex already
        // unlocked by whichever failure path jumped here.
        goto no_error;
error_exit:
        m_perfLock.lock_rel();
        return rc;
no_error:
        m_perfLock.lock_rel();

        mWokenUpByDaemon = false;
        mPendingLiveRequest = 0;
        mFirstConfiguration = false;
        enablePowerHint();
    }

    uint32_t frameNumber = request->frame_number;
    cam_stream_ID_t streamID;

    if (mFlushPerf) {
        //we cannot accept any requests during flush
        //NOTE(review): returns NO_ERROR as the original comment below admits;
        //callers cannot distinguish this drop from success — confirm intent.
        LOGE("process_capture_request cannot proceed during flush");
        pthread_mutex_unlock(&mMutex);
        return NO_ERROR; //should return an error
    }

    // Request id: use the one in the settings if present, else re-use the
    // previous one; fail only if there has never been one (first request).
    if (meta.exists(ANDROID_REQUEST_ID)) {
        request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
        mCurrentRequestId = request_id;
        LOGD("Received request with id: %d", request_id);
    } else if (mState == CONFIGURED || mCurrentRequestId == -1){
        LOGE("Unable to find request id field, \
                & no previous id available");
        pthread_mutex_unlock(&mMutex);
        return NAME_NOT_FOUND;
    } else {
        LOGD("Re-using old request id");
        request_id = mCurrentRequestId;
    }

    LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
                                    request->num_output_buffers,
                                    request->input_buffer,
                                    frameNumber);
    // Acquire all request buffers first
    streamID.num_streams = 0;
    int blob_request = 0;
    uint32_t snapshotStreamId = 0;
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            //Call function to store local copy of jpeg data for encode params.
            blob_request = 1;
            snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
        }

        // Wait for (and consume) the acquire fence before touching the buffer.
        if (output.acquire_fence != -1) {
           rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
           close(output.acquire_fence);
           if (rc != OK) {
              LOGE("sync wait failed %d", rc);
              pthread_mutex_unlock(&mMutex);
              return rc;
           }
        }

        streamID.streamID[streamID.num_streams] =
            channel->getStreamID(channel->getStreamTypeMask());
        streamID.num_streams++;

        if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
            isVidBufRequested = true;
        }
    }

    if (blob_request) {
        KPI_ATRACE_INT("SNAPSHOT", 1);
    }
    if (blob_request && mRawDumpChannel) {
        LOGD("Trigger Raw based on blob request if Raw dump is enabled");
        streamID.streamID[streamID.num_streams] =
            mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
        streamID.num_streams++;
    }

    if(request->input_buffer == NULL) {
        /* Parse the settings:
         * - For every request in NORMAL MODE
         * - For every request in HFR mode during preview only case
         * - For first request of every batch in HFR mode during video
         *   recording. In batchmode the same settings except frame number is
         *   repeated in each request of the batch.
         */
        if (!mBatchSize ||
           (mBatchSize && !isVidBufRequested) ||
           (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
            rc = setFrameParameters(request, streamID, blob_request, snapshotStreamId);
            if (rc < 0) {
                LOGE("fail to set frame parameters");
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        /* For batchMode HFR, setFrameParameters is not called for every
         * request. But only frame number of the latest request is parsed.
         * Keep track of first and last frame numbers in a batch so that
         * metadata for the frame numbers of batch can be duplicated in
         * handleBatchMetadta */
        if (mBatchSize) {
            if (!mToBeQueuedVidBufs) {
                //start of the batch
                mFirstFrameNumberInBatch = request->frame_number;
            }
            if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
                LOGE("Failed to set the frame number in the parameters");
                pthread_mutex_unlock(&mMutex);
                return BAD_VALUE;
            }
        }
        if (mNeedSensorRestart) {
            /* Unlock the mutex as restartSensor waits on the channels to be
             * stopped, which in turn calls stream callback functions -
             * handleBufferWithLock and handleMetadataWithLock */
            pthread_mutex_unlock(&mMutex);
            rc = dynamicUpdateMetaStreamInfo();
            if (rc != NO_ERROR) {
                LOGE("Restarting the sensor failed");
                return BAD_VALUE;
            }
            mNeedSensorRestart = false;
            pthread_mutex_lock(&mMutex);
        }
    } else {

        // Reprocess request: only the input buffer's acquire fence needs
        // waiting here; reproc parameters are set further below.
        if (request->input_buffer->acquire_fence != -1) {
           rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
           close(request->input_buffer->acquire_fence);
           if (rc != OK) {
              LOGE("input buffer sync wait failed %d", rc);
              pthread_mutex_unlock(&mMutex);
              return rc;
           }
        }
    }

    if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
        mLastCustIntentFrmNum = frameNumber;
    }
    /* Update pending request list and pending buffers map */
    PendingRequestInfo pendingRequest;
    pendingRequestIterator latestRequest;
    pendingRequest.frame_number = frameNumber;
    pendingRequest.num_buffers = request->num_output_buffers;
    pendingRequest.request_id = request_id;
    pendingRequest.blob_request = blob_request;
    pendingRequest.timestamp = 0;
    pendingRequest.bUrgentReceived = 0;
    if (request->input_buffer) {
        // Deep-copy the input buffer descriptor; freed when the pending
        // request is erased (see erasePendingRequest).
        pendingRequest.input_buffer =
                (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
        *(pendingRequest.input_buffer) = *(request->input_buffer);
        pInputBuffer = pendingRequest.input_buffer;
    } else {
       pendingRequest.input_buffer = NULL;
       pInputBuffer = NULL;
    }

    pendingRequest.pipeline_depth = 0;
    pendingRequest.partial_result_cnt = 0;
    extractJpegMetadata(mCurJpegMeta, request);
    pendingRequest.jpegMetadata = mCurJpegMeta;
    pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
    pendingRequest.shutter_notified = false;

    //extract capture intent
    if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
        mCaptureIntent =
                meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
    }
    pendingRequest.capture_intent = mCaptureIntent;

    //extract CAC info
    if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
        mCacMode =
                meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
    }
    pendingRequest.fwkCacMode = mCacMode;

    PendingBuffersInRequest bufsForCurRequest;
    bufsForCurRequest.frame_number = frameNumber;
    // Mark current timestamp for the new request
    bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);

    for (size_t i = 0; i < request->num_output_buffers; i++) {
        RequestedBufferInfo requestedBuf;
        memset(&requestedBuf, 0, sizeof(requestedBuf));
        requestedBuf.stream = request->output_buffers[i].stream;
        requestedBuf.buffer = NULL;
        pendingRequest.buffers.push_back(requestedBuf);

        // Add to buffer handle the pending buffers list
        PendingBufferInfo bufferInfo;
        bufferInfo.buffer = request->output_buffers[i].buffer;
        bufferInfo.stream = request->output_buffers[i].stream;
        bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
        QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
        LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
            frameNumber, bufferInfo.buffer,
            channel->getStreamTypeMask(), bufferInfo.stream->format);
    }
    // Add this request packet into mPendingBuffersMap
    mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
    LOGD("mPendingBuffersMap.num_overall_buffers = %d",
        mPendingBuffersMap.get_num_overall_buffers());

    latestRequest = mPendingRequestsList.insert(
            mPendingRequestsList.end(), pendingRequest);
    if(mFlush) {
        LOGI("mFlush is true");
        pthread_mutex_unlock(&mMutex);
        return NO_ERROR;
    }

    // Notify metadata channel we receive a request
    mMetadataChannel->request(NULL, frameNumber);

    if(request->input_buffer != NULL){
        LOGD("Input request, frame_number %d", frameNumber);
        rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
        if (NO_ERROR != rc) {
            LOGE("fail to set reproc parameters");
            pthread_mutex_unlock(&mMutex);
            return rc;
        }
    }

    // Call request on other streams
    uint32_t streams_need_metadata = 0;
    pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
    for (size_t i = 0; i < request->num_output_buffers; i++) {
        const camera3_stream_buffer_t& output = request->output_buffers[i];
        QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;

        if (channel == NULL) {
            LOGW("invalid channel pointer for stream");
            continue;
        }

        if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
            LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
                      output.buffer, request->input_buffer, frameNumber);
            if(request->input_buffer != NULL){
                rc = channel->request(output.buffer, frameNumber,
                        pInputBuffer, &mReprocMeta);
                if (rc < 0) {
                    LOGE("Fail to request on picture channel");
                    pthread_mutex_unlock(&mMutex);
                    return rc;
                }
            } else {
                LOGD("snapshot request with buffer %p, frame_number %d",
                         output.buffer, frameNumber);
                // Requests without settings reuse the previously applied
                // parameters (mPrevParameters).
                if (!request->settings) {
                    rc = channel->request(output.buffer, frameNumber,
                            NULL, mPrevParameters);
                } else {
                    rc = channel->request(output.buffer, frameNumber,
                            NULL, mParameters);
                }
                if (rc < 0) {
                    LOGE("Fail to request on picture channel");
                    pthread_mutex_unlock(&mMutex);
                    return rc;
                }
                pendingBufferIter->need_metadata = true;
                streams_need_metadata++;
            }
        } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
            bool needMetadata = false;
            QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
            rc = yuvChannel->request(output.buffer, frameNumber,
                    pInputBuffer,
                    (pInputBuffer ? &mReprocMeta : mParameters), needMetadata);
            if (rc < 0) {
                LOGE("Fail to request on YUV channel");
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
            pendingBufferIter->need_metadata = needMetadata;
            if (needMetadata)
                streams_need_metadata += 1;
            LOGD("calling YUV channel request, need_metadata is %d",
                     needMetadata);
        } else {
            LOGD("request with buffer %p, frame_number %d",
                  output.buffer, frameNumber);
            rc = channel->request(output.buffer, frameNumber);
            // In batch (HFR) mode, queue the batch once mBatchSize video
            // buffers have been accumulated.
            if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
                    && mBatchSize) {
                mToBeQueuedVidBufs++;
                if (mToBeQueuedVidBufs == mBatchSize) {
                    channel->queueBatchBuf();
                }
            }
            if (rc < 0) {
                LOGE("request failed");
                pthread_mutex_unlock(&mMutex);
                return rc;
            }
        }
        pendingBufferIter++;
    }

    //If 2 streams have need_metadata set to true, fail the request, unless
    //we copy/reference count the metadata buffer
    if (streams_need_metadata > 1) {
        LOGE("not supporting request in which two streams requires"
                " 2 HAL metadata for reprocessing");
        pthread_mutex_unlock(&mMutex);
        return -EINVAL;
    }

    if(request->input_buffer == NULL) {
        /* Set the parameters to backend:
         * - For every request in NORMAL MODE
         * - For every request in HFR mode during preview only case
         * - Once every batch in HFR mode during video recording
         */
        if (!mBatchSize ||
                (mBatchSize && !isVidBufRequested) ||
                (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
            LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
                     mBatchSize, isVidBufRequested,
                    mToBeQueuedVidBufs);
            rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
                    mParameters);
            if (rc < 0) {
                LOGE("set_parms failed");
            }
            /* reset to zero coz, the batch is queued */
            mToBeQueuedVidBufs = 0;
            mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
        }
        mPendingLiveRequest++;
    }

    LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);

    mState = STARTED;
    // Added a timed condition wait
    struct timespec ts;
    uint8_t isValidTimeout = 1;
    rc = clock_gettime(CLOCK_REALTIME, &ts);
    if (rc < 0) {
      // Fall back to an untimed wait if CLOCK_REALTIME is unreadable.
      isValidTimeout = 0;
      LOGE("Error reading the real time clock!!");
    }
    else {
      // Make timeout as 5 sec for request to be honored
      ts.tv_sec += 5;
    }
    //Block on conditional variable. Throttles the framework while the number
    //of in-flight requests is at mMinInFlightRequests; mRequestCond is
    //signalled by unblockRequestIfNecessary as results drain.
    while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
            (mState != ERROR) && (mState != DEINIT)) {
        if (!isValidTimeout) {
            LOGD("Blocking on conditional wait");
            pthread_cond_wait(&mRequestCond, &mMutex);
        }
        else {
            LOGD("Blocking on timed conditional wait");
            rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
            if (rc == ETIMEDOUT) {
                rc = -ENODEV;
                LOGE("Unblocked on timeout!!!!");
                break;
            }
        }
        LOGD("Unblocked");
        if (mWokenUpByDaemon) {
            mWokenUpByDaemon = false;
            if (mPendingLiveRequest < mMaxInFlightRequests)
                break;
        }
    }
    pthread_mutex_unlock(&mMutex);

    return rc;
}
4303
4304/*===========================================================================
4305 * FUNCTION : dump
4306 *
4307 * DESCRIPTION:
4308 *
4309 * PARAMETERS :
4310 *
4311 *
4312 * RETURN :
4313 *==========================================================================*/
4314void QCamera3HardwareInterface::dump(int fd)
4315{
4316 pthread_mutex_lock(&mMutex);
4317 dprintf(fd, "\n Camera HAL3 information Begin \n");
4318
4319 dprintf(fd, "\nNumber of pending requests: %zu \n",
4320 mPendingRequestsList.size());
4321 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4322 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
4323 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
4324 for(pendingRequestIterator i = mPendingRequestsList.begin();
4325 i != mPendingRequestsList.end(); i++) {
4326 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
4327 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
4328 i->input_buffer);
4329 }
4330 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
4331 mPendingBuffersMap.get_num_overall_buffers());
4332 dprintf(fd, "-------+------------------\n");
4333 dprintf(fd, " Frame | Stream type mask \n");
4334 dprintf(fd, "-------+------------------\n");
4335 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
4336 for(auto &j : req.mPendingBufferList) {
4337 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
4338 dprintf(fd, " %5d | %11d \n",
4339 req.frame_number, channel->getStreamTypeMask());
4340 }
4341 }
4342 dprintf(fd, "-------+------------------\n");
4343
4344 dprintf(fd, "\nPending frame drop list: %zu\n",
4345 mPendingFrameDropList.size());
4346 dprintf(fd, "-------+-----------\n");
4347 dprintf(fd, " Frame | Stream ID \n");
4348 dprintf(fd, "-------+-----------\n");
4349 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
4350 i != mPendingFrameDropList.end(); i++) {
4351 dprintf(fd, " %5d | %9d \n",
4352 i->frame_number, i->stream_ID);
4353 }
4354 dprintf(fd, "-------+-----------\n");
4355
4356 dprintf(fd, "\n Camera HAL3 information End \n");
4357
4358 /* use dumpsys media.camera as trigger to send update debug level event */
4359 mUpdateDebugLevel = true;
4360 pthread_mutex_unlock(&mMutex);
4361 return;
4362}
4363
4364/*===========================================================================
4365 * FUNCTION : flush
4366 *
4367 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
4368 * conditionally restarts channels
4369 *
4370 * PARAMETERS :
4371 * @ restartChannels: re-start all channels
4372 *
4373 *
4374 * RETURN :
4375 * 0 on success
4376 * Error code on failure
4377 *==========================================================================*/
4378int QCamera3HardwareInterface::flush(bool restartChannels)
4379{
4380 KPI_ATRACE_CALL();
4381 int32_t rc = NO_ERROR;
4382
4383 LOGD("Unblocking Process Capture Request");
4384 pthread_mutex_lock(&mMutex);
4385 mFlush = true;
4386 pthread_mutex_unlock(&mMutex);
4387
4388 rc = stopAllChannels();
4389 // unlink of dualcam
4390 if (mIsDeviceLinked) {
4391 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
4392 pthread_mutex_lock(&gCamLock);
4393
4394 if (mIsMainCamera == 1) {
4395 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
4396 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
4397 // related session id should be session id of linked session
4398 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4399 } else {
4400 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
4401 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
4402 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
4403 }
4404 pthread_mutex_unlock(&gCamLock);
4405
4406 rc = mCameraHandle->ops->sync_related_sensors(
4407 mCameraHandle->camera_handle, m_pRelCamSyncBuf);
4408 if (rc < 0) {
4409 LOGE("Dualcam: Unlink failed, but still proceed to close");
4410 }
4411 }
4412
4413 if (rc < 0) {
4414 LOGE("stopAllChannels failed");
4415 return rc;
4416 }
4417 if (mChannelHandle) {
4418 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
4419 mChannelHandle);
4420 }
4421
4422 // Reset bundle info
4423 rc = setBundleInfo();
4424 if (rc < 0) {
4425 LOGE("setBundleInfo failed %d", rc);
4426 return rc;
4427 }
4428
4429 // Mutex Lock
4430 pthread_mutex_lock(&mMutex);
4431
4432 // Unblock process_capture_request
4433 mPendingLiveRequest = 0;
4434 pthread_cond_signal(&mRequestCond);
4435
4436 rc = notifyErrorForPendingRequests();
4437 if (rc < 0) {
4438 LOGE("notifyErrorForPendingRequests failed");
4439 pthread_mutex_unlock(&mMutex);
4440 return rc;
4441 }
4442
4443 mFlush = false;
4444
4445 // Start the Streams/Channels
4446 if (restartChannels) {
4447 rc = startAllChannels();
4448 if (rc < 0) {
4449 LOGE("startAllChannels failed");
4450 pthread_mutex_unlock(&mMutex);
4451 return rc;
4452 }
4453 }
4454
4455 if (mChannelHandle) {
4456 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
4457 mChannelHandle);
4458 if (rc < 0) {
4459 LOGE("start_channel failed");
4460 pthread_mutex_unlock(&mMutex);
4461 return rc;
4462 }
4463 }
4464
4465 pthread_mutex_unlock(&mMutex);
4466
4467 return 0;
4468}
4469
4470/*===========================================================================
4471 * FUNCTION : flushPerf
4472 *
4473 * DESCRIPTION: This is the performance optimization version of flush that does
4474 * not use stream off, rather flushes the system
4475 *
4476 * PARAMETERS :
4477 *
4478 *
4479 * RETURN : 0 : success
4480 * -EINVAL: input is malformed (device is not valid)
4481 * -ENODEV: if the device has encountered a serious error
4482 *==========================================================================*/
int QCamera3HardwareInterface::flushPerf()
{
    ATRACE_CALL();
    int32_t rc = 0;
    struct timespec timeout;
    bool timed_wait = false;

    pthread_mutex_lock(&mMutex);
    mFlushPerf = true;
    // Snapshot the number of buffers the HAL still owns at the instant the
    // flush is issued; the wait loop below blocks until this drains to zero
    // (decremented elsewhere, with mBuffersCond signaled as buffers return).
    mPendingBuffersMap.numPendingBufsAtFlush =
            mPendingBuffersMap.get_num_overall_buffers();
    LOGD("Calling flush. Wait for %d buffers to return",
            mPendingBuffersMap.numPendingBufsAtFlush);

    /* send the flush event to the backend */
    rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
    if (rc < 0) {
        LOGE("Error in flush: IOCTL failure");
        // Clear the flush-in-progress flag before bailing so subsequent
        // requests are not treated as part of a flush.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
        // Nothing outstanding -- the flush is trivially complete.
        LOGD("No pending buffers in HAL, return flush");
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    /* wait on a signal that buffers were received */
    // Prefer a bounded wait; fall back to an unbounded cond wait only if
    // the realtime clock cannot be read (timedwait uses CLOCK_REALTIME).
    rc = clock_gettime(CLOCK_REALTIME, &timeout);
    if (rc < 0) {
        LOGE("Error reading the real time clock, cannot use timed wait");
    } else {
        timeout.tv_sec += FLUSH_TIMEOUT;
        timeout = true;
    }

    //Block on conditional variable
    // Loop guards against spurious wakeups: re-check the pending count
    // every time the wait returns. pthread_cond_*wait return a positive
    // errno-style code (e.g. ETIMEDOUT), not -1, on failure.
    while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
        LOGD("Waiting on mBuffersCond");
        if (!timed_wait) {
            rc = pthread_cond_wait(&mBuffersCond, &mMutex);
            if (rc != 0) {
                LOGE("pthread_cond_wait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        } else {
            rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
            if (rc != 0) {
                LOGE("pthread_cond_timedwait failed due to rc = %s",
                        strerror(rc));
                break;
            }
        }
    }
    if (rc != 0) {
        // Wait failed or timed out with buffers still outstanding: report a
        // serious device error to the caller.
        mFlushPerf = false;
        pthread_mutex_unlock(&mMutex);
        return -ENODEV;
    }

    LOGD("Received buffers, now safe to return them");

    //make sure the channels handle flush
    //currently only required for the picture channel to release snapshot resources
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (*it)->channel;
        if (channel) {
            rc = channel->flush();
            if (rc) {
                LOGE("Flushing the channels failed with error %d", rc);
                // even though the channel flush failed we need to continue and
                // return the buffers we have to the framework, however the return
                // value will be an error
                rc = -ENODEV;
            }
        }
    }

    /* notify the frameworks and send errored results */
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    //unblock process_capture_request
    mPendingLiveRequest = 0;
    unblockRequestIfNecessary();

    mFlushPerf = false;
    pthread_mutex_unlock(&mMutex);
    LOGD ("Flush Operation complete. rc = %d", rc);
    return rc;
}
4583
4584/*===========================================================================
4585 * FUNCTION : handleCameraDeviceError
4586 *
4587 * DESCRIPTION: This function calls internal flush and notifies the error to
4588 * framework and updates the state variable.
4589 *
4590 * PARAMETERS : None
4591 *
4592 * RETURN : NO_ERROR on Success
4593 * Error code on failure
4594 *==========================================================================*/
4595int32_t QCamera3HardwareInterface::handleCameraDeviceError()
4596{
4597 int32_t rc = NO_ERROR;
4598
4599 pthread_mutex_lock(&mMutex);
4600 if (mState != ERROR) {
4601 //if mState != ERROR, nothing to be done
4602 pthread_mutex_unlock(&mMutex);
4603 return NO_ERROR;
4604 }
4605 pthread_mutex_unlock(&mMutex);
4606
4607 rc = flush(false /* restart channels */);
4608 if (NO_ERROR != rc) {
4609 LOGE("internal flush to handle mState = ERROR failed");
4610 }
4611
4612 pthread_mutex_lock(&mMutex);
4613 mState = DEINIT;
4614 pthread_mutex_unlock(&mMutex);
4615
4616 camera3_notify_msg_t notify_msg;
4617 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
4618 notify_msg.type = CAMERA3_MSG_ERROR;
4619 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
4620 notify_msg.message.error.error_stream = NULL;
4621 notify_msg.message.error.frame_number = 0;
4622 mCallbackOps->notify(mCallbackOps, &notify_msg);
4623
4624 return rc;
4625}
4626
4627/*===========================================================================
4628 * FUNCTION : captureResultCb
4629 *
4630 * DESCRIPTION: Callback handler for all capture result
4631 * (streams, as well as metadata)
4632 *
4633 * PARAMETERS :
4634 * @metadata : metadata information
4635 * @buffer : actual gralloc buffer to be returned to frameworks.
4636 * NULL if metadata.
4637 *
4638 * RETURN : NONE
4639 *==========================================================================*/
4640void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
4641 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
4642{
4643 if (metadata_buf) {
4644 if (mBatchSize) {
4645 handleBatchMetadata(metadata_buf,
4646 true /* free_and_bufdone_meta_buf */);
4647 } else { /* mBatchSize = 0 */
4648 hdrPlusPerfLock(metadata_buf);
4649 pthread_mutex_lock(&mMutex);
4650 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004651 true /* free_and_bufdone_meta_buf */,
4652 false /* first frame of batch metadata */ );
Thierry Strudel3d639192016-09-09 11:52:26 -07004653 pthread_mutex_unlock(&mMutex);
4654 }
4655 } else if (isInputBuffer) {
4656 pthread_mutex_lock(&mMutex);
4657 handleInputBufferWithLock(frame_number);
4658 pthread_mutex_unlock(&mMutex);
4659 } else {
4660 pthread_mutex_lock(&mMutex);
4661 handleBufferWithLock(buffer, frame_number);
4662 pthread_mutex_unlock(&mMutex);
4663 }
4664 return;
4665}
4666
4667/*===========================================================================
4668 * FUNCTION : getReprocessibleOutputStreamId
4669 *
4670 * DESCRIPTION: Get source output stream id for the input reprocess stream
4671 * based on size and format, which would be the largest
4672 * output stream if an input stream exists.
4673 *
4674 * PARAMETERS :
4675 * @id : return the stream id if found
4676 *
4677 * RETURN : int32_t type of status
4678 * NO_ERROR -- success
4679 * none-zero failure code
4680 *==========================================================================*/
4681int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
4682{
4683 /* check if any output or bidirectional stream with the same size and format
4684 and return that stream */
4685 if ((mInputStreamInfo.dim.width > 0) &&
4686 (mInputStreamInfo.dim.height > 0)) {
4687 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
4688 it != mStreamInfo.end(); it++) {
4689
4690 camera3_stream_t *stream = (*it)->stream;
4691 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
4692 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
4693 (stream->format == mInputStreamInfo.format)) {
4694 // Usage flag for an input stream and the source output stream
4695 // may be different.
4696 LOGD("Found reprocessible output stream! %p", *it);
4697 LOGD("input stream usage 0x%x, current stream usage 0x%x",
4698 stream->usage, mInputStreamInfo.usage);
4699
4700 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
4701 if (channel != NULL && channel->mStreams[0]) {
4702 id = channel->mStreams[0]->getMyServerID();
4703 return NO_ERROR;
4704 }
4705 }
4706 }
4707 } else {
4708 LOGD("No input stream, so no reprocessible output stream");
4709 }
4710 return NAME_NOT_FOUND;
4711}
4712
4713/*===========================================================================
4714 * FUNCTION : lookupFwkName
4715 *
4716 * DESCRIPTION: In case the enum is not same in fwk and backend
4717 * make sure the parameter is correctly propogated
4718 *
4719 * PARAMETERS :
4720 * @arr : map between the two enums
4721 * @len : len of the map
4722 * @hal_name : name of the hal_parm to map
4723 *
4724 * RETURN : int type of status
4725 * fwk_name -- success
4726 * none-zero failure code
4727 *==========================================================================*/
4728template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
4729 size_t len, halType hal_name)
4730{
4731
4732 for (size_t i = 0; i < len; i++) {
4733 if (arr[i].hal_name == hal_name) {
4734 return arr[i].fwk_name;
4735 }
4736 }
4737
4738 /* Not able to find matching framework type is not necessarily
4739 * an error case. This happens when mm-camera supports more attributes
4740 * than the frameworks do */
4741 LOGH("Cannot find matching framework type");
4742 return NAME_NOT_FOUND;
4743}
4744
4745/*===========================================================================
4746 * FUNCTION : lookupHalName
4747 *
4748 * DESCRIPTION: In case the enum is not same in fwk and backend
4749 * make sure the parameter is correctly propogated
4750 *
4751 * PARAMETERS :
4752 * @arr : map between the two enums
4753 * @len : len of the map
4754 * @fwk_name : name of the hal_parm to map
4755 *
4756 * RETURN : int32_t type of status
4757 * hal_name -- success
4758 * none-zero failure code
4759 *==========================================================================*/
4760template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
4761 size_t len, fwkType fwk_name)
4762{
4763 for (size_t i = 0; i < len; i++) {
4764 if (arr[i].fwk_name == fwk_name) {
4765 return arr[i].hal_name;
4766 }
4767 }
4768
4769 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
4770 return NAME_NOT_FOUND;
4771}
4772
4773/*===========================================================================
4774 * FUNCTION : lookupProp
4775 *
4776 * DESCRIPTION: lookup a value by its name
4777 *
4778 * PARAMETERS :
4779 * @arr : map between the two enums
4780 * @len : size of the map
4781 * @name : name to be looked up
4782 *
4783 * RETURN : Value if found
4784 * CAM_CDS_MODE_MAX if not found
4785 *==========================================================================*/
4786template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
4787 size_t len, const char *name)
4788{
4789 if (name) {
4790 for (size_t i = 0; i < len; i++) {
4791 if (!strcmp(arr[i].desc, name)) {
4792 return arr[i].val;
4793 }
4794 }
4795 }
4796 return CAM_CDS_MODE_MAX;
4797}
4798
4799/*===========================================================================
4800 *
4801 * DESCRIPTION:
4802 *
4803 * PARAMETERS :
4804 * @metadata : metadata information from callback
4805 * @timestamp: metadata buffer timestamp
4806 * @request_id: request id
4807 * @jpegMetadata: additional jpeg metadata
4808 * @pprocDone: whether internal offline postprocsesing is done
4809 *
4810 * RETURN : camera_metadata_t*
4811 * metadata in a format specified by fwk
4812 *==========================================================================*/
4813camera_metadata_t*
4814QCamera3HardwareInterface::translateFromHalMetadata(
4815 metadata_buffer_t *metadata,
4816 nsecs_t timestamp,
4817 int32_t request_id,
4818 const CameraMetadata& jpegMetadata,
4819 uint8_t pipeline_depth,
4820 uint8_t capture_intent,
4821 bool pprocDone,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004822 uint8_t fwk_cacMode,
4823 bool firstMetadataInBatch)
Thierry Strudel3d639192016-09-09 11:52:26 -07004824{
4825 CameraMetadata camMetadata;
4826 camera_metadata_t *resultMetadata;
4827
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004828 if (mBatchSize && !firstMetadataInBatch) {
4829 /* In batch mode, use cached metadata from the first metadata
4830 in the batch */
4831 camMetadata.clear();
4832 camMetadata = mCachedMetadata;
4833 }
4834
Thierry Strudel3d639192016-09-09 11:52:26 -07004835 if (jpegMetadata.entryCount())
4836 camMetadata.append(jpegMetadata);
4837
4838 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &timestamp, 1);
4839 camMetadata.update(ANDROID_REQUEST_ID, &request_id, 1);
4840 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pipeline_depth, 1);
4841 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &capture_intent, 1);
4842
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004843 if (mBatchSize && !firstMetadataInBatch) {
4844 /* In batch mode, use cached metadata instead of parsing metadata buffer again */
4845 resultMetadata = camMetadata.release();
4846 return resultMetadata;
4847 }
4848
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
4850 int64_t fwk_frame_number = *frame_number;
4851 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
4852 }
4853
4854 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
4855 int32_t fps_range[2];
4856 fps_range[0] = (int32_t)float_range->min_fps;
4857 fps_range[1] = (int32_t)float_range->max_fps;
4858 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
4859 fps_range, 2);
4860 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
4861 fps_range[0], fps_range[1]);
4862 }
4863
4864 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
4865 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
4866 }
4867
4868 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
4869 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
4870 METADATA_MAP_SIZE(SCENE_MODES_MAP),
4871 *sceneMode);
4872 if (NAME_NOT_FOUND != val) {
4873 uint8_t fwkSceneMode = (uint8_t)val;
4874 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
4875 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
4876 fwkSceneMode);
4877 }
4878 }
4879
4880 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
4881 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
4882 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
4883 }
4884
4885 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
4886 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
4887 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
4888 }
4889
4890 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
4891 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
4892 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
4893 }
4894
4895 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
4896 CAM_INTF_META_EDGE_MODE, metadata) {
4897 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
4898 }
4899
4900 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
4901 uint8_t fwk_flashPower = (uint8_t) *flashPower;
4902 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
4903 }
4904
4905 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
4906 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
4907 }
4908
4909 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
4910 if (0 <= *flashState) {
4911 uint8_t fwk_flashState = (uint8_t) *flashState;
4912 if (!gCamCapability[mCameraId]->flash_available) {
4913 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
4914 }
4915 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
4916 }
4917 }
4918
4919 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
4920 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
4921 if (NAME_NOT_FOUND != val) {
4922 uint8_t fwk_flashMode = (uint8_t)val;
4923 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
4924 }
4925 }
4926
4927 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
4928 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
4929 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
4930 }
4931
4932 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
4933 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
4934 }
4935
4936 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
4937 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
4938 }
4939
4940 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
4941 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
4942 }
4943
4944 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
4945 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
4946 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
4947 }
4948
4949 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
4950 uint8_t fwk_videoStab = (uint8_t) *videoStab;
4951 LOGD("fwk_videoStab = %d", fwk_videoStab);
4952 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
4953 } else {
4954 // Regardless of Video stab supports or not, CTS is expecting the EIS result to be non NULL
4955 // and so hardcoding the Video Stab result to OFF mode.
4956 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
4957 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07004959 }
4960
4961 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
4962 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
4963 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
4964 }
4965
4966 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
4967 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
4968 }
4969
4970 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelSourcePattern,
4971 CAM_INTF_META_BLACK_LEVEL_SOURCE_PATTERN, metadata) {
4972
4973 LOGD("dynamicblackLevel = %f %f %f %f",
4974 blackLevelSourcePattern->cam_black_level[0],
4975 blackLevelSourcePattern->cam_black_level[1],
4976 blackLevelSourcePattern->cam_black_level[2],
4977 blackLevelSourcePattern->cam_black_level[3]);
4978 }
4979
4980 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
4981 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
4982 float fwk_blackLevelInd[4];
4983
4984 fwk_blackLevelInd[0] = blackLevelAppliedPattern->cam_black_level[0];
4985 fwk_blackLevelInd[1] = blackLevelAppliedPattern->cam_black_level[1];
4986 fwk_blackLevelInd[2] = blackLevelAppliedPattern->cam_black_level[2];
4987 fwk_blackLevelInd[3] = blackLevelAppliedPattern->cam_black_level[3];
4988
4989 LOGD("applied dynamicblackLevel = %f %f %f %f",
4990 blackLevelAppliedPattern->cam_black_level[0],
4991 blackLevelAppliedPattern->cam_black_level[1],
4992 blackLevelAppliedPattern->cam_black_level[2],
4993 blackLevelAppliedPattern->cam_black_level[3]);
4994 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd, 4);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004995
4996#ifndef USE_HAL_3_3
4997 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
4998 // Need convert the internal 16 bit depth to sensor 10 bit sensor raw
4999 // depth space.
5000 fwk_blackLevelInd[0] /= 64.0;
5001 fwk_blackLevelInd[1] /= 64.0;
5002 fwk_blackLevelInd[2] /= 64.0;
5003 fwk_blackLevelInd[3] /= 64.0;
5004 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd, 4);
5005#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07005006 }
5007
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005008#ifndef USE_HAL_3_3
5009 // Fixed whitelevel is used by ISP/Sensor
5010 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
5011 &gCamCapability[mCameraId]->white_level, 1);
5012#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07005013
5014 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
5015 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
5016 int32_t scalerCropRegion[4];
5017 scalerCropRegion[0] = hScalerCropRegion->left;
5018 scalerCropRegion[1] = hScalerCropRegion->top;
5019 scalerCropRegion[2] = hScalerCropRegion->width;
5020 scalerCropRegion[3] = hScalerCropRegion->height;
5021
5022 // Adjust crop region from sensor output coordinate system to active
5023 // array coordinate system.
5024 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
5025 scalerCropRegion[2], scalerCropRegion[3]);
5026
5027 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
5028 }
5029
5030 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
5031 LOGD("sensorExpTime = %lld", *sensorExpTime);
5032 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
5033 }
5034
5035 IF_META_AVAILABLE(int64_t, sensorFameDuration,
5036 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
5037 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
5038 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
5039 }
5040
5041 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
5042 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
5043 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
5044 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
5045 sensorRollingShutterSkew, 1);
5046 }
5047
5048 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
5049 LOGD("sensorSensitivity = %d", *sensorSensitivity);
5050 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
5051
5052 //calculate the noise profile based on sensitivity
5053 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
5054 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
5055 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
5056 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
5057 noise_profile[i] = noise_profile_S;
5058 noise_profile[i+1] = noise_profile_O;
5059 }
5060 LOGD("noise model entry (S, O) is (%f, %f)",
5061 noise_profile_S, noise_profile_O);
5062 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
5063 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
5064 }
5065
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005066#ifndef USE_HAL_3_3
5067 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
5068 int32_t fwk_ispSensitivity = (int32_t) *ispSensitivity;
5069 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
5070 }
5071#endif
5072
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
5074 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
5075 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
5076 }
5077
5078 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
5079 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
5080 *faceDetectMode);
5081 if (NAME_NOT_FOUND != val) {
5082 uint8_t fwk_faceDetectMode = (uint8_t)val;
5083 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
5084
5085 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
5086 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
5087 CAM_INTF_META_FACE_DETECTION, metadata) {
5088 uint8_t numFaces = MIN(
5089 faceDetectionInfo->num_faces_detected, MAX_ROI);
5090 int32_t faceIds[MAX_ROI];
5091 uint8_t faceScores[MAX_ROI];
5092 int32_t faceRectangles[MAX_ROI * 4];
5093 int32_t faceLandmarks[MAX_ROI * 6];
5094 size_t j = 0, k = 0;
5095
5096 for (size_t i = 0; i < numFaces; i++) {
5097 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
5098 // Adjust crop region from sensor output coordinate system to active
5099 // array coordinate system.
5100 cam_rect_t& rect = faceDetectionInfo->faces[i].face_boundary;
5101 mCropRegionMapper.toActiveArray(rect.left, rect.top,
5102 rect.width, rect.height);
5103
5104 convertToRegions(faceDetectionInfo->faces[i].face_boundary,
5105 faceRectangles+j, -1);
5106
5107 j+= 4;
5108 }
5109 if (numFaces <= 0) {
5110 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
5111 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
5112 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
5113 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
5114 }
5115
5116 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
5117 numFaces);
5118 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
5119 faceRectangles, numFaces * 4U);
5120 if (fwk_faceDetectMode ==
5121 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
5122 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
5123 CAM_INTF_META_FACE_LANDMARK, metadata) {
5124
5125 for (size_t i = 0; i < numFaces; i++) {
5126 // Map the co-ordinate sensor output coordinate system to active
5127 // array coordinate system.
5128 mCropRegionMapper.toActiveArray(
5129 landmarks->face_landmarks[i].left_eye_center.x,
5130 landmarks->face_landmarks[i].left_eye_center.y);
5131 mCropRegionMapper.toActiveArray(
5132 landmarks->face_landmarks[i].right_eye_center.x,
5133 landmarks->face_landmarks[i].right_eye_center.y);
5134 mCropRegionMapper.toActiveArray(
5135 landmarks->face_landmarks[i].mouth_center.x,
5136 landmarks->face_landmarks[i].mouth_center.y);
5137
5138 convertLandmarks(landmarks->face_landmarks[i], faceLandmarks+k);
Thierry Strudel04e026f2016-10-10 11:27:36 -07005139 k+= TOTAL_LANDMARK_INDICES;
5140 }
5141 } else {
5142 for (size_t i = 0; i < numFaces; i++) {
5143 setInvalidLandmarks(faceLandmarks+k);
5144 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07005145 }
5146 }
5147
5148 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
5149 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
5150 faceLandmarks, numFaces * 6U);
5151 }
5152 }
5153 }
5154 }
5155 }
5156
5157 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
5158 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
5159 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005160
5161 if (fwk_histogramMode == ANDROID_STATISTICS_HISTOGRAM_MODE_ON) {
5162 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
5163 // process histogram statistics info
5164 uint32_t hist_buf[3][CAM_HISTOGRAM_STATS_SIZE];
5165 uint32_t hist_size = sizeof(cam_histogram_data_t::hist_buf);
5166 cam_histogram_data_t rHistData, gHistData, bHistData;
5167 memset(&rHistData, 0, sizeof(rHistData));
5168 memset(&gHistData, 0, sizeof(gHistData));
5169 memset(&bHistData, 0, sizeof(bHistData));
5170
5171 switch (stats_data->type) {
5172 case CAM_HISTOGRAM_TYPE_BAYER:
5173 switch (stats_data->bayer_stats.data_type) {
5174 case CAM_STATS_CHANNEL_GR:
5175 rHistData = gHistData = bHistData = stats_data->bayer_stats.gr_stats;
5176 break;
5177 case CAM_STATS_CHANNEL_GB:
5178 rHistData = gHistData = bHistData = stats_data->bayer_stats.gb_stats;
5179 break;
5180 case CAM_STATS_CHANNEL_B:
5181 rHistData = gHistData = bHistData = stats_data->bayer_stats.b_stats;
5182 break;
5183 case CAM_STATS_CHANNEL_ALL:
5184 rHistData = stats_data->bayer_stats.r_stats;
5185 //Framework expects only 3 channels. So, for now,
5186 //use gb stats for G channel.
5187 gHistData = stats_data->bayer_stats.gb_stats;
5188 bHistData = stats_data->bayer_stats.b_stats;
5189 break;
5190 case CAM_STATS_CHANNEL_Y:
5191 case CAM_STATS_CHANNEL_R:
5192 default:
5193 rHistData = gHistData = bHistData = stats_data->bayer_stats.r_stats;
5194 break;
5195 }
5196 break;
5197 case CAM_HISTOGRAM_TYPE_YUV:
5198 rHistData = gHistData = bHistData = stats_data->yuv_stats;
5199 break;
5200 }
5201
5202 memcpy(hist_buf, rHistData.hist_buf, hist_size);
5203 memcpy(hist_buf[1], gHistData.hist_buf, hist_size);
5204 memcpy(hist_buf[2], bHistData.hist_buf, hist_size);
5205
5206 camMetadata.update(ANDROID_STATISTICS_HISTOGRAM, (int32_t*)hist_buf, hist_size*3);
5207 }
5208 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005209 }
5210
5211 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
5212 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
5213 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
5214 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
5215 }
5216
5217 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
5218 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
5219 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
5220 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
5221 }
5222
5223 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
5224 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
5225 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
5226 CAM_MAX_SHADING_MAP_HEIGHT);
5227 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
5228 CAM_MAX_SHADING_MAP_WIDTH);
5229 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
5230 lensShadingMap->lens_shading, 4U * map_width * map_height);
5231 }
5232
5233 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
5234 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
5235 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
5236 }
5237
5238 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
5239 //Populate CAM_INTF_META_TONEMAP_CURVES
5240 /* ch0 = G, ch 1 = B, ch 2 = R*/
5241 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5242 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5243 tonemap->tonemap_points_cnt,
5244 CAM_MAX_TONEMAP_CURVE_SIZE);
5245 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5246 }
5247
5248 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
5249 &tonemap->curves[0].tonemap_points[0][0],
5250 tonemap->tonemap_points_cnt * 2);
5251
5252 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
5253 &tonemap->curves[1].tonemap_points[0][0],
5254 tonemap->tonemap_points_cnt * 2);
5255
5256 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
5257 &tonemap->curves[2].tonemap_points[0][0],
5258 tonemap->tonemap_points_cnt * 2);
5259 }
5260
5261 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
5262 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
5263 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
5264 CC_GAIN_MAX);
5265 }
5266
5267 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
5268 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
5269 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
5270 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
5271 CC_MATRIX_COLS * CC_MATRIX_ROWS);
5272 }
5273
5274 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
5275 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
5276 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
5277 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
5278 toneCurve->tonemap_points_cnt,
5279 CAM_MAX_TONEMAP_CURVE_SIZE);
5280 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
5281 }
5282 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
5283 (float*)toneCurve->curve.tonemap_points,
5284 toneCurve->tonemap_points_cnt * 2);
5285 }
5286
5287 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
5288 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
5289 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
5290 predColorCorrectionGains->gains, 4);
5291 }
5292
5293 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
5294 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
5295 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
5296 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
5297 CC_MATRIX_ROWS * CC_MATRIX_COLS);
5298 }
5299
5300 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
5301 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
5302 }
5303
5304 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
5305 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
5306 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
5307 }
5308
5309 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
5310 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
5311 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
5312 }
5313
5314 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
5315 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
5316 *effectMode);
5317 if (NAME_NOT_FOUND != val) {
5318 uint8_t fwk_effectMode = (uint8_t)val;
5319 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
5320 }
5321 }
5322
5323 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
5324 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
5325 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
5326 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
5327 if (NAME_NOT_FOUND != fwk_testPatternMode) {
5328 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
5329 }
5330 int32_t fwk_testPatternData[4];
5331 fwk_testPatternData[0] = testPatternData->r;
5332 fwk_testPatternData[3] = testPatternData->b;
5333 switch (gCamCapability[mCameraId]->color_arrangement) {
5334 case CAM_FILTER_ARRANGEMENT_RGGB:
5335 case CAM_FILTER_ARRANGEMENT_GRBG:
5336 fwk_testPatternData[1] = testPatternData->gr;
5337 fwk_testPatternData[2] = testPatternData->gb;
5338 break;
5339 case CAM_FILTER_ARRANGEMENT_GBRG:
5340 case CAM_FILTER_ARRANGEMENT_BGGR:
5341 fwk_testPatternData[2] = testPatternData->gr;
5342 fwk_testPatternData[1] = testPatternData->gb;
5343 break;
5344 default:
5345 LOGE("color arrangement %d is not supported",
5346 gCamCapability[mCameraId]->color_arrangement);
5347 break;
5348 }
5349 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
5350 }
5351
5352 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
5353 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
5354 }
5355
5356 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
5357 String8 str((const char *)gps_methods);
5358 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
5359 }
5360
5361 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
5362 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
5363 }
5364
5365 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
5366 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
5367 }
5368
5369 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
5370 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
5371 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
5372 }
5373
5374 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
5375 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
5376 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
5377 }
5378
5379 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
5380 int32_t fwk_thumb_size[2];
5381 fwk_thumb_size[0] = thumb_size->width;
5382 fwk_thumb_size[1] = thumb_size->height;
5383 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
5384 }
5385
5386 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
5387 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
5388 privateData,
5389 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
5390 }
5391
5392 if (metadata->is_tuning_params_valid) {
5393 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
5394 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
5395 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
5396
5397
5398 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
5399 sizeof(uint32_t));
5400 data += sizeof(uint32_t);
5401
5402 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
5403 sizeof(uint32_t));
5404 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
5405 data += sizeof(uint32_t);
5406
5407 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
5408 sizeof(uint32_t));
5409 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
5410 data += sizeof(uint32_t);
5411
5412 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
5413 sizeof(uint32_t));
5414 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
5415 data += sizeof(uint32_t);
5416
5417 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
5418 sizeof(uint32_t));
5419 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
5420 data += sizeof(uint32_t);
5421
5422 metadata->tuning_params.tuning_mod3_data_size = 0;
5423 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
5424 sizeof(uint32_t));
5425 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
5426 data += sizeof(uint32_t);
5427
5428 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
5429 TUNING_SENSOR_DATA_MAX);
5430 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
5431 count);
5432 data += count;
5433
5434 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
5435 TUNING_VFE_DATA_MAX);
5436 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
5437 count);
5438 data += count;
5439
5440 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
5441 TUNING_CPP_DATA_MAX);
5442 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
5443 count);
5444 data += count;
5445
5446 count = MIN(metadata->tuning_params.tuning_cac_data_size,
5447 TUNING_CAC_DATA_MAX);
5448 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
5449 count);
5450 data += count;
5451
5452 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
5453 (int32_t *)(void *)tuning_meta_data_blob,
5454 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
5455 }
5456
5457 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
5458 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
5459 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
5460 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
5461 NEUTRAL_COL_POINTS);
5462 }
5463
5464 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
5465 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
5466 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
5467 }
5468
5469 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
5470 int32_t aeRegions[REGIONS_TUPLE_COUNT];
5471 // Adjust crop region from sensor output coordinate system to active
5472 // array coordinate system.
5473 mCropRegionMapper.toActiveArray(hAeRegions->rect.left, hAeRegions->rect.top,
5474 hAeRegions->rect.width, hAeRegions->rect.height);
5475
5476 convertToRegions(hAeRegions->rect, aeRegions, hAeRegions->weight);
5477 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
5478 REGIONS_TUPLE_COUNT);
5479 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5480 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
5481 hAeRegions->rect.left, hAeRegions->rect.top, hAeRegions->rect.width,
5482 hAeRegions->rect.height);
5483 }
5484
5485 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
5486 uint8_t fwk_afState = (uint8_t) *afState;
5487 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
5488 LOGD("urgent Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
5489 }
5490
5491 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
5492 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
5493 }
5494
5495 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
5496 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
5497 }
5498
5499 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
5500 uint8_t fwk_lensState = *lensState;
5501 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
5502 }
5503
5504 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
5505 /*af regions*/
5506 int32_t afRegions[REGIONS_TUPLE_COUNT];
5507 // Adjust crop region from sensor output coordinate system to active
5508 // array coordinate system.
5509 mCropRegionMapper.toActiveArray(hAfRegions->rect.left, hAfRegions->rect.top,
5510 hAfRegions->rect.width, hAfRegions->rect.height);
5511
5512 convertToRegions(hAfRegions->rect, afRegions, hAfRegions->weight);
5513 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
5514 REGIONS_TUPLE_COUNT);
5515 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
5516 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
5517 hAfRegions->rect.left, hAfRegions->rect.top, hAfRegions->rect.width,
5518 hAfRegions->rect.height);
5519 }
5520
5521 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
5522 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
5523 *hal_ab_mode);
5524 if (NAME_NOT_FOUND != val) {
5525 uint8_t fwk_ab_mode = (uint8_t)val;
5526 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
5527 }
5528 }
5529
5530 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
5531 int val = lookupFwkName(SCENE_MODES_MAP,
5532 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
5533 if (NAME_NOT_FOUND != val) {
5534 uint8_t fwkBestshotMode = (uint8_t)val;
5535 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
5536 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
5537 } else {
5538 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
5539 }
5540 }
5541
5542 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
5543 uint8_t fwk_mode = (uint8_t) *mode;
5544 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
5545 }
5546
5547 /* Constant metadata values to be update*/
5548 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
5549 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
5550
5551 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
5552 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
5553
5554 int32_t hotPixelMap[2];
5555 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
5556
5557 // CDS
5558 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
5559 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
5560 }
5561
Thierry Strudel04e026f2016-10-10 11:27:36 -07005562 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
5563 int32_t fwk_hdr;
5564 if(*vhdr == CAM_SENSOR_HDR_OFF) {
5565 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
5566 } else {
5567 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
5568 }
5569 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
5570 }
5571
5572 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
5573 camMetadata.update(QCAMERA3_IR_MODE,(int32_t *) &ir, 1);
5574 }
5575
Thierry Strudel3d639192016-09-09 11:52:26 -07005576 // TNR
5577 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
5578 uint8_t tnr_enable = tnr->denoise_enable;
5579 int32_t tnr_process_type = (int32_t)tnr->process_plates;
5580
5581 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
5582 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
5583 }
5584
5585 // Reprocess crop data
5586 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
5587 uint8_t cnt = crop_data->num_of_streams;
5588 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
5589 // mm-qcamera-daemon only posts crop_data for streams
5590 // not linked to pproc. So no valid crop metadata is not
5591 // necessarily an error case.
5592 LOGD("No valid crop metadata entries");
5593 } else {
5594 uint32_t reproc_stream_id;
5595 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5596 LOGD("No reprocessible stream found, ignore crop data");
5597 } else {
5598 int rc = NO_ERROR;
5599 Vector<int32_t> roi_map;
5600 int32_t *crop = new int32_t[cnt*4];
5601 if (NULL == crop) {
5602 rc = NO_MEMORY;
5603 }
5604 if (NO_ERROR == rc) {
5605 int32_t streams_found = 0;
5606 for (size_t i = 0; i < cnt; i++) {
5607 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
5608 if (pprocDone) {
5609 // HAL already does internal reprocessing,
5610 // either via reprocessing before JPEG encoding,
5611 // or offline postprocessing for pproc bypass case.
5612 crop[0] = 0;
5613 crop[1] = 0;
5614 crop[2] = mInputStreamInfo.dim.width;
5615 crop[3] = mInputStreamInfo.dim.height;
5616 } else {
5617 crop[0] = crop_data->crop_info[i].crop.left;
5618 crop[1] = crop_data->crop_info[i].crop.top;
5619 crop[2] = crop_data->crop_info[i].crop.width;
5620 crop[3] = crop_data->crop_info[i].crop.height;
5621 }
5622 roi_map.add(crop_data->crop_info[i].roi_map.left);
5623 roi_map.add(crop_data->crop_info[i].roi_map.top);
5624 roi_map.add(crop_data->crop_info[i].roi_map.width);
5625 roi_map.add(crop_data->crop_info[i].roi_map.height);
5626 streams_found++;
5627 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
5628 crop[0], crop[1], crop[2], crop[3]);
5629 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
5630 crop_data->crop_info[i].roi_map.left,
5631 crop_data->crop_info[i].roi_map.top,
5632 crop_data->crop_info[i].roi_map.width,
5633 crop_data->crop_info[i].roi_map.height);
5634 break;
5635
5636 }
5637 }
5638 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
5639 &streams_found, 1);
5640 camMetadata.update(QCAMERA3_CROP_REPROCESS,
5641 crop, (size_t)(streams_found * 4));
5642 if (roi_map.array()) {
5643 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
5644 roi_map.array(), roi_map.size());
5645 }
5646 }
5647 if (crop) {
5648 delete [] crop;
5649 }
5650 }
5651 }
5652 }
5653
5654 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
5655 // Regardless of CAC supports or not, CTS is expecting the CAC result to be non NULL and
5656 // so hardcoding the CAC result to OFF mode.
5657 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
5658 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
5659 } else {
5660 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
5661 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
5662 *cacMode);
5663 if (NAME_NOT_FOUND != val) {
5664 uint8_t resultCacMode = (uint8_t)val;
5665 // check whether CAC result from CB is equal to Framework set CAC mode
5666 // If not equal then set the CAC mode came in corresponding request
5667 if (fwk_cacMode != resultCacMode) {
5668 resultCacMode = fwk_cacMode;
5669 }
5670 LOGD("fwk_cacMode=%d resultCacMode=%d", fwk_cacMode, resultCacMode);
5671 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
5672 } else {
5673 LOGE("Invalid CAC camera parameter: %d", *cacMode);
5674 }
5675 }
5676 }
5677
5678 // Post blob of cam_cds_data through vendor tag.
5679 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
5680 uint8_t cnt = cdsInfo->num_of_streams;
5681 cam_cds_data_t cdsDataOverride;
5682 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
5683 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
5684 cdsDataOverride.num_of_streams = 1;
5685 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
5686 uint32_t reproc_stream_id;
5687 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
5688 LOGD("No reprocessible stream found, ignore cds data");
5689 } else {
5690 for (size_t i = 0; i < cnt; i++) {
5691 if (cdsInfo->cds_info[i].stream_id ==
5692 reproc_stream_id) {
5693 cdsDataOverride.cds_info[0].cds_enable =
5694 cdsInfo->cds_info[i].cds_enable;
5695 break;
5696 }
5697 }
5698 }
5699 } else {
5700 LOGD("Invalid stream count %d in CDS_DATA", cnt);
5701 }
5702 camMetadata.update(QCAMERA3_CDS_INFO,
5703 (uint8_t *)&cdsDataOverride,
5704 sizeof(cam_cds_data_t));
5705 }
5706
5707 // Ldaf calibration data
5708 if (!mLdafCalibExist) {
5709 IF_META_AVAILABLE(uint32_t, ldafCalib,
5710 CAM_INTF_META_LDAF_EXIF, metadata) {
5711 mLdafCalibExist = true;
5712 mLdafCalib[0] = ldafCalib[0];
5713 mLdafCalib[1] = ldafCalib[1];
5714 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
5715 ldafCalib[0], ldafCalib[1]);
5716 }
5717 }
5718
5719 // DDM debug data through vendor tag
5720 cam_ddm_info_t ddm_info;
5721 memset(&ddm_info, 0, sizeof(cam_ddm_info_t));
5722 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
5723 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
5724 memcpy(&(ddm_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
5725 }
5726 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
5727 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
5728 memcpy(&(ddm_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
5729 }
5730 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
5731 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
5732 memcpy(&(ddm_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
5733 }
5734 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
5735 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
5736 memcpy(&(ddm_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
5737 }
5738 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
5739 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
5740 memcpy(&(ddm_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
5741 }
5742 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
5743 memcpy(&(ddm_info.pipeline_flip), flip, sizeof(int32_t));
5744 }
5745 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
5746 CAM_INTF_PARM_ROTATION, metadata) {
5747 memcpy(&(ddm_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
5748 }
5749 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB,
5750 (uint8_t *)&ddm_info, sizeof(cam_ddm_info_t));
5751
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005752 /* In batch mode, cache the first metadata in the batch */
5753 if (mBatchSize && firstMetadataInBatch) {
5754 mCachedMetadata.clear();
5755 mCachedMetadata = camMetadata;
5756 }
5757
Thierry Strudel3d639192016-09-09 11:52:26 -07005758 resultMetadata = camMetadata.release();
5759 return resultMetadata;
5760}
5761
5762/*===========================================================================
5763 * FUNCTION : saveExifParams
5764 *
 * DESCRIPTION: Caches the 3A/stats EXIF debug data received in the metadata
 *              callback into mExifParams for later use when composing JPEG
 *              EXIF debug information.
5766 *
5767 * PARAMETERS :
5768 * @metadata : metadata information from callback
5769 *
5770 * RETURN : none
5771 *
5772 *==========================================================================*/
5773void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
5774{
5775 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
5776 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
5777 if (mExifParams.debug_params) {
5778 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
5779 mExifParams.debug_params->ae_debug_params_valid = TRUE;
5780 }
5781 }
5782 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
5783 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
5784 if (mExifParams.debug_params) {
5785 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
5786 mExifParams.debug_params->awb_debug_params_valid = TRUE;
5787 }
5788 }
5789 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
5790 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
5791 if (mExifParams.debug_params) {
5792 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
5793 mExifParams.debug_params->af_debug_params_valid = TRUE;
5794 }
5795 }
5796 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
5797 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
5798 if (mExifParams.debug_params) {
5799 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
5800 mExifParams.debug_params->asd_debug_params_valid = TRUE;
5801 }
5802 }
5803 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
5804 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
5805 if (mExifParams.debug_params) {
5806 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
5807 mExifParams.debug_params->stats_debug_params_valid = TRUE;
5808 }
5809 }
5810 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
5811 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
5812 if (mExifParams.debug_params) {
5813 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
5814 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
5815 }
5816 }
5817 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
5818 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
5819 if (mExifParams.debug_params) {
5820 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
5821 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
5822 }
5823 }
5824 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
5825 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
5826 if (mExifParams.debug_params) {
5827 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
5828 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
5829 }
5830 }
5831}
5832
5833/*===========================================================================
5834 * FUNCTION : get3AExifParams
5835 *
 * DESCRIPTION: Returns the cached 3A EXIF parameters most recently stored
 *              by saveExifParams().
5837 *
5838 * PARAMETERS : none
5839 *
5840 *
5841 * RETURN : mm_jpeg_exif_params_t
5842 *
5843 *==========================================================================*/
mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
{
    // Return a copy of the cached 3A EXIF parameters (populated by
    // saveExifParams()).  Note: debug_params inside mExifParams is a raw
    // pointer, so the returned copy shares that debug-params storage with
    // the member — callers must not free it.
    return mExifParams;
}
5848
5849/*===========================================================================
5850 * FUNCTION : translateCbUrgentMetadataToResultMetadata
5851 *
 * DESCRIPTION: Translates the urgent (partial result) subset of HAL metadata
 *              (3A states, modes and triggers) into framework
 *              camera_metadata_t format.
5853 *
5854 * PARAMETERS :
5855 * @metadata : metadata information from callback
5856 *
5857 * RETURN : camera_metadata_t*
5858 * metadata in a format specified by fwk
5859 *==========================================================================*/
5860camera_metadata_t*
5861QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
5862 (metadata_buffer_t *metadata)
5863{
5864 CameraMetadata camMetadata;
5865 camera_metadata_t *resultMetadata;
5866
5867
5868 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
5869 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
5870 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
5871 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
5872 }
5873
5874 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
5875 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
5876 &aecTrigger->trigger, 1);
5877 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
5878 &aecTrigger->trigger_id, 1);
5879 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
5880 aecTrigger->trigger);
5881 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
5882 aecTrigger->trigger_id);
5883 }
5884
5885 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
5886 uint8_t fwk_ae_state = (uint8_t) *ae_state;
5887 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
5888 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
5889 }
5890
5891 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
5892 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
5893 if (NAME_NOT_FOUND != val) {
5894 uint8_t fwkAfMode = (uint8_t)val;
5895 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
5896 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
5897 } else {
5898 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
5899 val);
5900 }
5901 }
5902
5903 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
5904 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
5905 &af_trigger->trigger, 1);
5906 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
5907 af_trigger->trigger);
5908 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &af_trigger->trigger_id, 1);
5909 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
5910 af_trigger->trigger_id);
5911 }
5912
5913 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
5914 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
5915 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
5916 if (NAME_NOT_FOUND != val) {
5917 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
5918 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
5919 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
5920 } else {
5921 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
5922 }
5923 }
5924
5925 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
5926 uint32_t aeMode = CAM_AE_MODE_MAX;
5927 int32_t flashMode = CAM_FLASH_MODE_MAX;
5928 int32_t redeye = -1;
5929 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
5930 aeMode = *pAeMode;
5931 }
5932 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
5933 flashMode = *pFlashMode;
5934 }
5935 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
5936 redeye = *pRedeye;
5937 }
5938
5939 if (1 == redeye) {
5940 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
5941 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5942 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
5943 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
5944 flashMode);
5945 if (NAME_NOT_FOUND != val) {
5946 fwk_aeMode = (uint8_t)val;
5947 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5948 } else {
5949 LOGE("Unsupported flash mode %d", flashMode);
5950 }
5951 } else if (aeMode == CAM_AE_MODE_ON) {
5952 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
5953 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5954 } else if (aeMode == CAM_AE_MODE_OFF) {
5955 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
5956 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
5957 } else {
5958 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
5959 "flashMode:%d, aeMode:%u!!!",
5960 redeye, flashMode, aeMode);
5961 }
5962
5963 resultMetadata = camMetadata.release();
5964 return resultMetadata;
5965}
5966
5967/*===========================================================================
5968 * FUNCTION : dumpMetadataToFile
5969 *
5970 * DESCRIPTION: Dumps tuning metadata to file system
5971 *
5972 * PARAMETERS :
5973 * @meta : tuning metadata
5974 * @dumpFrameCount : current dump frame count
5975 * @enabled : Enable mask
5976 *
5977 *==========================================================================*/
void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
                                                   uint32_t &dumpFrameCount,
                                                   bool enabled,
                                                   const char *type,
                                                   uint32_t frameNumber)
{
    // Dumps one tuning-metadata blob to QCAMERA_DUMP_FRM_LOCATION as a flat
    // binary file. File layout (all little-endian uint32_t header fields,
    // written in this exact order, followed by the per-module payloads at
    // fixed offsets inside meta.data):
    //   version | sensor_size | vfe_size | cpp_size | cac_size | mod3_size(=0)
    //   sensor payload | vfe payload | cpp payload | cac payload
    // The write order below IS the file format consumed by tuning tools —
    // do not reorder.
    //
    // @meta           : tuning metadata to serialize (mod3 size is zeroed here)
    // @dumpFrameCount : current dump counter, used in the file name
    //                   (by-reference, but not incremented in this function)
    // @enabled        : when false, only the size sanity checks run; no file I/O
    // @type           : short tag embedded in the file name
    // @frameNumber    : capture frame number embedded in the file name

    //Some sanity checks
    // Reject any payload larger than the fixed-size regions laid out in
    // meta.data; writing it would read past the valid buffer.
    if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
        LOGE("Tuning sensor data size bigger than expected %d: %d",
              meta.tuning_sensor_data_size,
              TUNING_SENSOR_DATA_MAX);
        return;
    }

    if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
        LOGE("Tuning VFE data size bigger than expected %d: %d",
              meta.tuning_vfe_data_size,
              TUNING_VFE_DATA_MAX);
        return;
    }

    if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
        LOGE("Tuning CPP data size bigger than expected %d: %d",
              meta.tuning_cpp_data_size,
              TUNING_CPP_DATA_MAX);
        return;
    }

    if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
        LOGE("Tuning CAC data size bigger than expected %d: %d",
              meta.tuning_cac_data_size,
              TUNING_CAC_DATA_MAX);
        return;
    }
    //

    if(enabled){
        // File name: <dump-dir><YYYYmmddHHMMSS><count>m_<type>_<frame>.bin
        char timeBuf[FILENAME_MAX];
        char buf[FILENAME_MAX];
        memset(buf, 0, sizeof(buf));
        memset(timeBuf, 0, sizeof(timeBuf));
        time_t current_time;
        struct tm * timeinfo;
        time (&current_time);
        timeinfo = localtime (&current_time);
        if (timeinfo != NULL) {
            strftime (timeBuf, sizeof(timeBuf),
                    QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
        }
        String8 filePath(timeBuf);
        // NOTE(review): dumpFrameCount/frameNumber are uint32_t printed with
        // %d — harmless in practice but %u would be the matching specifier.
        snprintf(buf,
                sizeof(buf),
                "%dm_%s_%d.bin",
                dumpFrameCount,
                type,
                frameNumber);
        filePath.append(buf);
        // NOTE(review): 0777 is world-writable; debug-only dump path, but a
        // tighter mode (0644) would be safer — confirm no tool relies on it.
        int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
        if (file_fd >= 0) {
            // written_len is accumulated but never validated; short writes
            // are silently ignored (best-effort debug dump).
            ssize_t written_len = 0;
            meta.tuning_data_version = TUNING_DATA_VERSION;
            // --- header: six uint32_t fields, fixed order ---
            void *data = (void *)((uint8_t *)&meta.tuning_data_version);
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
            LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
            LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
            LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
            LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            // mod3 section is not dumped; size forced to 0 in the header.
            meta.tuning_mod3_data_size = 0;
            data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
            LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
            written_len += write(file_fd, data, sizeof(uint32_t));
            // --- payloads: each module at its fixed offset in meta.data ---
            size_t total_size = meta.tuning_sensor_data_size;
            data = (void *)((uint8_t *)&meta.data);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_vfe_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cpp_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            total_size = meta.tuning_cac_data_size;
            data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
            written_len += write(file_fd, data, total_size);
            close(file_fd);
        }else {
            LOGE("fail to open file for metadata dumping");
        }
    }
}
6075
6076/*===========================================================================
6077 * FUNCTION : cleanAndSortStreamInfo
6078 *
6079 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
6080 * and sort them such that raw stream is at the end of the list
6081 * This is a workaround for camera daemon constraint.
6082 *
6083 * PARAMETERS : None
6084 *
6085 *==========================================================================*/
6086void QCamera3HardwareInterface::cleanAndSortStreamInfo()
6087{
6088 List<stream_info_t *> newStreamInfo;
6089
6090 /*clean up invalid streams*/
6091 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
6092 it != mStreamInfo.end();) {
6093 if(((*it)->status) == INVALID){
6094 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
6095 delete channel;
6096 free(*it);
6097 it = mStreamInfo.erase(it);
6098 } else {
6099 it++;
6100 }
6101 }
6102
6103 // Move preview/video/callback/snapshot streams into newList
6104 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6105 it != mStreamInfo.end();) {
6106 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
6107 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
6108 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
6109 newStreamInfo.push_back(*it);
6110 it = mStreamInfo.erase(it);
6111 } else
6112 it++;
6113 }
6114 // Move raw streams into newList
6115 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6116 it != mStreamInfo.end();) {
6117 newStreamInfo.push_back(*it);
6118 it = mStreamInfo.erase(it);
6119 }
6120
6121 mStreamInfo = newStreamInfo;
6122}
6123
6124/*===========================================================================
6125 * FUNCTION : extractJpegMetadata
6126 *
6127 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
6128 * JPEG metadata is cached in HAL, and return as part of capture
6129 * result when metadata is returned from camera daemon.
6130 *
6131 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
6132 * @request: capture request
6133 *
6134 *==========================================================================*/
6135void QCamera3HardwareInterface::extractJpegMetadata(
6136 CameraMetadata& jpegMetadata,
6137 const camera3_capture_request_t *request)
6138{
6139 CameraMetadata frame_settings;
6140 frame_settings = request->settings;
6141
6142 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
6143 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
6144 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
6145 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
6146
6147 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
6148 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
6149 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
6150 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
6151
6152 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
6153 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
6154 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
6155 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
6156
6157 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
6158 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
6159 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
6160 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
6161
6162 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
6163 jpegMetadata.update(ANDROID_JPEG_QUALITY,
6164 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
6165 frame_settings.find(ANDROID_JPEG_QUALITY).count);
6166
6167 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
6168 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
6169 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
6170 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
6171
6172 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
6173 int32_t thumbnail_size[2];
6174 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
6175 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
6176 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
6177 int32_t orientation =
6178 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006179 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006180 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
6181 int32_t temp;
6182 temp = thumbnail_size[0];
6183 thumbnail_size[0] = thumbnail_size[1];
6184 thumbnail_size[1] = temp;
6185 }
6186 }
6187 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
6188 thumbnail_size,
6189 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
6190 }
6191
6192}
6193
6194/*===========================================================================
6195 * FUNCTION : convertToRegions
6196 *
6197 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
6198 *
6199 * PARAMETERS :
6200 * @rect : cam_rect_t struct to convert
6201 * @region : int32_t destination array
6202 * @weight : if we are converting from cam_area_t, weight is valid
6203 * else weight = -1
6204 *
6205 *==========================================================================*/
6206void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
6207 int32_t *region, int weight)
6208{
6209 region[0] = rect.left;
6210 region[1] = rect.top;
6211 region[2] = rect.left + rect.width;
6212 region[3] = rect.top + rect.height;
6213 if (weight > -1) {
6214 region[4] = weight;
6215 }
6216}
6217
6218/*===========================================================================
6219 * FUNCTION : convertFromRegions
6220 *
6221 * DESCRIPTION: helper method to convert from array to cam_rect_t
6222 *
6223 * PARAMETERS :
6224 * @rect : cam_rect_t struct to convert
6225 * @region : int32_t destination array
6226 * @weight : if we are converting from cam_area_t, weight is valid
6227 * else weight = -1
6228 *
6229 *==========================================================================*/
6230void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
6231 const camera_metadata_t *settings, uint32_t tag)
6232{
6233 CameraMetadata frame_settings;
6234 frame_settings = settings;
6235 int32_t x_min = frame_settings.find(tag).data.i32[0];
6236 int32_t y_min = frame_settings.find(tag).data.i32[1];
6237 int32_t x_max = frame_settings.find(tag).data.i32[2];
6238 int32_t y_max = frame_settings.find(tag).data.i32[3];
6239 roi.weight = frame_settings.find(tag).data.i32[4];
6240 roi.rect.left = x_min;
6241 roi.rect.top = y_min;
6242 roi.rect.width = x_max - x_min;
6243 roi.rect.height = y_max - y_min;
6244}
6245
6246/*===========================================================================
6247 * FUNCTION : resetIfNeededROI
6248 *
6249 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
6250 * crop region
6251 *
6252 * PARAMETERS :
6253 * @roi : cam_area_t struct to resize
6254 * @scalerCropRegion : cam_crop_region_t region to compare against
6255 *
6256 *
6257 *==========================================================================*/
6258bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
6259 const cam_crop_region_t* scalerCropRegion)
6260{
6261 int32_t roi_x_max = roi->rect.width + roi->rect.left;
6262 int32_t roi_y_max = roi->rect.height + roi->rect.top;
6263 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
6264 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
6265
6266 /* According to spec weight = 0 is used to indicate roi needs to be disabled
6267 * without having this check the calculations below to validate if the roi
6268 * is inside scalar crop region will fail resulting in the roi not being
6269 * reset causing algorithm to continue to use stale roi window
6270 */
6271 if (roi->weight == 0) {
6272 return true;
6273 }
6274
6275 if ((roi_x_max < scalerCropRegion->left) ||
6276 // right edge of roi window is left of scalar crop's left edge
6277 (roi_y_max < scalerCropRegion->top) ||
6278 // bottom edge of roi window is above scalar crop's top edge
6279 (roi->rect.left > crop_x_max) ||
6280 // left edge of roi window is beyond(right) of scalar crop's right edge
6281 (roi->rect.top > crop_y_max)){
6282 // top edge of roi windo is above scalar crop's top edge
6283 return false;
6284 }
6285 if (roi->rect.left < scalerCropRegion->left) {
6286 roi->rect.left = scalerCropRegion->left;
6287 }
6288 if (roi->rect.top < scalerCropRegion->top) {
6289 roi->rect.top = scalerCropRegion->top;
6290 }
6291 if (roi_x_max > crop_x_max) {
6292 roi_x_max = crop_x_max;
6293 }
6294 if (roi_y_max > crop_y_max) {
6295 roi_y_max = crop_y_max;
6296 }
6297 roi->rect.width = roi_x_max - roi->rect.left;
6298 roi->rect.height = roi_y_max - roi->rect.top;
6299 return true;
6300}
6301
6302/*===========================================================================
6303 * FUNCTION : convertLandmarks
6304 *
6305 * DESCRIPTION: helper method to extract the landmarks from face detection info
6306 *
6307 * PARAMETERS :
6308 * @landmark_data : input landmark data to be converted
6309 * @landmarks : int32_t destination array
6310 *
6311 *
6312 *==========================================================================*/
6313void QCamera3HardwareInterface::convertLandmarks(
6314 cam_face_landmarks_info_t landmark_data,
6315 int32_t *landmarks)
6316{
Thierry Strudel04e026f2016-10-10 11:27:36 -07006317 if (landmark_data.is_left_eye_valid) {
6318 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
6319 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
6320 } else {
6321 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
6322 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
6323 }
6324
6325 if (landmark_data.is_right_eye_valid) {
6326 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
6327 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
6328 } else {
6329 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
6330 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
6331 }
6332
6333 if (landmark_data.is_mouth_valid) {
6334 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
6335 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
6336 } else {
6337 landmarks[MOUTH_X] = FACE_INVALID_POINT;
6338 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
6339 }
6340}
6341
6342/*===========================================================================
6343 * FUNCTION : setInvalidLandmarks
6344 *
6345 * DESCRIPTION: helper method to set invalid landmarks
6346 *
6347 * PARAMETERS :
6348 * @landmarks : int32_t destination array
6349 *
6350 *
6351 *==========================================================================*/
6352void QCamera3HardwareInterface::setInvalidLandmarks(
6353 int32_t *landmarks)
6354{
6355 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
6356 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
6357 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
6358 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
6359 landmarks[MOUTH_X] = FACE_INVALID_POINT;
6360 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07006361}
6362
6363#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
6364/*===========================================================================
6365 * FUNCTION : initCapabilities
6366 *
6367 * DESCRIPTION: initialize camera capabilities in static data struct
6368 *
6369 * PARAMETERS :
6370 * @cameraId : camera Id
6371 *
6372 * RETURN : int32_t type of status
6373 * NO_ERROR -- success
6374 * none-zero failure code
6375 *==========================================================================*/
6376int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
6377{
6378 int rc = 0;
6379 mm_camera_vtbl_t *cameraHandle = NULL;
6380 QCamera3HeapMemory *capabilityHeap = NULL;
6381
6382 rc = camera_open((uint8_t)cameraId, &cameraHandle);
6383 if (rc) {
6384 LOGE("camera_open failed. rc = %d", rc);
6385 goto open_failed;
6386 }
6387 if (!cameraHandle) {
6388 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
6389 goto open_failed;
6390 }
6391
6392 capabilityHeap = new QCamera3HeapMemory(1);
6393 if (capabilityHeap == NULL) {
6394 LOGE("creation of capabilityHeap failed");
6395 goto heap_creation_failed;
6396 }
6397 /* Allocate memory for capability buffer */
6398 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
6399 if(rc != OK) {
6400 LOGE("No memory for cappability");
6401 goto allocate_failed;
6402 }
6403
6404 /* Map memory for capability buffer */
6405 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
6406 rc = cameraHandle->ops->map_buf(cameraHandle->camera_handle,
6407 CAM_MAPPING_BUF_TYPE_CAPABILITY,
6408 capabilityHeap->getFd(0),
6409 sizeof(cam_capability_t),
6410 capabilityHeap->getPtr(0));
6411 if(rc < 0) {
6412 LOGE("failed to map capability buffer");
6413 goto map_failed;
6414 }
6415
6416 /* Query Capability */
6417 rc = cameraHandle->ops->query_capability(cameraHandle->camera_handle);
6418 if(rc < 0) {
6419 LOGE("failed to query capability");
6420 goto query_failed;
6421 }
6422 gCamCapability[cameraId] = (cam_capability_t *)malloc(sizeof(cam_capability_t));
6423 if (!gCamCapability[cameraId]) {
6424 LOGE("out of memory");
6425 goto query_failed;
6426 }
6427 memcpy(gCamCapability[cameraId], DATA_PTR(capabilityHeap,0),
6428 sizeof(cam_capability_t));
6429
6430 int index;
6431 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
6432 cam_analysis_info_t *p_analysis_info =
6433 &gCamCapability[cameraId]->analysis_info[index];
6434 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
6435 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
6436 }
6437 rc = 0;
6438
6439query_failed:
6440 cameraHandle->ops->unmap_buf(cameraHandle->camera_handle,
6441 CAM_MAPPING_BUF_TYPE_CAPABILITY);
6442map_failed:
6443 capabilityHeap->deallocate();
6444allocate_failed:
6445 delete capabilityHeap;
6446heap_creation_failed:
6447 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
6448 cameraHandle = NULL;
6449open_failed:
6450 return rc;
6451}
6452
6453/*==========================================================================
6454 * FUNCTION : get3Aversion
6455 *
6456 * DESCRIPTION: get the Q3A S/W version
6457 *
6458 * PARAMETERS :
6459 * @sw_version: Reference of Q3A structure which will hold version info upon
6460 * return
6461 *
6462 * RETURN : None
6463 *
6464 *==========================================================================*/
6465void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
6466{
6467 if(gCamCapability[mCameraId])
6468 sw_version = gCamCapability[mCameraId]->q3a_version;
6469 else
6470 LOGE("Capability structure NULL!");
6471}
6472
6473
6474/*===========================================================================
6475 * FUNCTION : initParameters
6476 *
6477 * DESCRIPTION: initialize camera parameters
6478 *
6479 * PARAMETERS :
6480 *
6481 * RETURN : int32_t type of status
6482 * NO_ERROR -- success
6483 * none-zero failure code
6484 *==========================================================================*/
6485int QCamera3HardwareInterface::initParameters()
6486{
6487 int rc = 0;
6488
6489 //Allocate Set Param Buffer
6490 mParamHeap = new QCamera3HeapMemory(1);
6491 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
6492 if(rc != OK) {
6493 rc = NO_MEMORY;
6494 LOGE("Failed to allocate SETPARM Heap memory");
6495 delete mParamHeap;
6496 mParamHeap = NULL;
6497 return rc;
6498 }
6499
6500 //Map memory for parameters buffer
6501 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
6502 CAM_MAPPING_BUF_TYPE_PARM_BUF,
6503 mParamHeap->getFd(0),
6504 sizeof(metadata_buffer_t),
6505 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
6506 if(rc < 0) {
6507 LOGE("failed to map SETPARM buffer");
6508 rc = FAILED_TRANSACTION;
6509 mParamHeap->deallocate();
6510 delete mParamHeap;
6511 mParamHeap = NULL;
6512 return rc;
6513 }
6514
6515 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
6516
6517 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
6518 return rc;
6519}
6520
6521/*===========================================================================
6522 * FUNCTION : deinitParameters
6523 *
6524 * DESCRIPTION: de-initialize camera parameters
6525 *
6526 * PARAMETERS :
6527 *
6528 * RETURN : NONE
6529 *==========================================================================*/
6530void QCamera3HardwareInterface::deinitParameters()
6531{
6532 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
6533 CAM_MAPPING_BUF_TYPE_PARM_BUF);
6534
6535 mParamHeap->deallocate();
6536 delete mParamHeap;
6537 mParamHeap = NULL;
6538
6539 mParameters = NULL;
6540
6541 free(mPrevParameters);
6542 mPrevParameters = NULL;
6543}
6544
6545/*===========================================================================
6546 * FUNCTION : calcMaxJpegSize
6547 *
6548 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
6549 *
6550 * PARAMETERS :
6551 *
6552 * RETURN : max_jpeg_size
6553 *==========================================================================*/
6554size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
6555{
6556 size_t max_jpeg_size = 0;
6557 size_t temp_width, temp_height;
6558 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
6559 MAX_SIZES_CNT);
6560 for (size_t i = 0; i < count; i++) {
6561 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
6562 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
6563 if (temp_width * temp_height > max_jpeg_size ) {
6564 max_jpeg_size = temp_width * temp_height;
6565 }
6566 }
6567 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
6568 return max_jpeg_size;
6569}
6570
6571/*===========================================================================
6572 * FUNCTION : getMaxRawSize
6573 *
6574 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
6575 *
6576 * PARAMETERS :
6577 *
6578 * RETURN : Largest supported Raw Dimension
6579 *==========================================================================*/
6580cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
6581{
6582 int max_width = 0;
6583 cam_dimension_t maxRawSize;
6584
6585 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
6586 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
6587 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
6588 max_width = gCamCapability[camera_id]->raw_dim[i].width;
6589 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
6590 }
6591 }
6592 return maxRawSize;
6593}
6594
6595
6596/*===========================================================================
6597 * FUNCTION : calcMaxJpegDim
6598 *
6599 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
6600 *
6601 * PARAMETERS :
6602 *
6603 * RETURN : max_jpeg_dim
6604 *==========================================================================*/
6605cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
6606{
6607 cam_dimension_t max_jpeg_dim;
6608 cam_dimension_t curr_jpeg_dim;
6609 max_jpeg_dim.width = 0;
6610 max_jpeg_dim.height = 0;
6611 curr_jpeg_dim.width = 0;
6612 curr_jpeg_dim.height = 0;
6613 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
6614 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
6615 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
6616 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
6617 max_jpeg_dim.width * max_jpeg_dim.height ) {
6618 max_jpeg_dim.width = curr_jpeg_dim.width;
6619 max_jpeg_dim.height = curr_jpeg_dim.height;
6620 }
6621 }
6622 return max_jpeg_dim;
6623}
6624
6625/*===========================================================================
6626 * FUNCTION : addStreamConfig
6627 *
6628 * DESCRIPTION: adds the stream configuration to the array
6629 *
6630 * PARAMETERS :
6631 * @available_stream_configs : pointer to stream configuration array
6632 * @scalar_format : scalar format
6633 * @dim : configuration dimension
6634 * @config_type : input or output configuration type
6635 *
6636 * RETURN : NONE
6637 *==========================================================================*/
6638void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
6639 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
6640{
6641 available_stream_configs.add(scalar_format);
6642 available_stream_configs.add(dim.width);
6643 available_stream_configs.add(dim.height);
6644 available_stream_configs.add(config_type);
6645}
6646
6647/*===========================================================================
6648 * FUNCTION : suppportBurstCapture
6649 *
6650 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
6651 *
6652 * PARAMETERS :
6653 * @cameraId : camera Id
6654 *
6655 * RETURN : true if camera supports BURST_CAPTURE
6656 * false otherwise
6657 *==========================================================================*/
6658bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
6659{
6660 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
6661 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
6662 const int32_t highResWidth = 3264;
6663 const int32_t highResHeight = 2448;
6664
6665 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
6666 // Maximum resolution images cannot be captured at >= 10fps
6667 // -> not supporting BURST_CAPTURE
6668 return false;
6669 }
6670
6671 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
6672 // Maximum resolution images can be captured at >= 20fps
6673 // --> supporting BURST_CAPTURE
6674 return true;
6675 }
6676
6677 // Find the smallest highRes resolution, or largest resolution if there is none
6678 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
6679 MAX_SIZES_CNT);
6680 size_t highRes = 0;
6681 while ((highRes + 1 < totalCnt) &&
6682 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
6683 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
6684 highResWidth * highResHeight)) {
6685 highRes++;
6686 }
6687 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
6688 return true;
6689 } else {
6690 return false;
6691 }
6692}
6693
6694/*===========================================================================
6695 * FUNCTION : initStaticMetadata
6696 *
6697 * DESCRIPTION: initialize the static metadata
6698 *
6699 * PARAMETERS :
6700 * @cameraId : camera Id
6701 *
6702 * RETURN : int32_t type of status
6703 * 0 -- success
6704 * non-zero failure code
6705 *==========================================================================*/
6706int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
6707{
6708 int rc = 0;
6709 CameraMetadata staticInfo;
6710 size_t count = 0;
6711 bool limitedDevice = false;
6712 char prop[PROPERTY_VALUE_MAX];
6713 bool supportBurst = false;
6714
6715 supportBurst = supportBurstCapture(cameraId);
6716
6717 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
6718 * guaranteed or if min fps of max resolution is less than 20 fps, its
6719 * advertised as limited device*/
6720 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
6721 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
6722 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
6723 !supportBurst;
6724
6725 uint8_t supportedHwLvl = limitedDevice ?
6726 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006727#ifndef USE_HAL_3_3
6728 // LEVEL_3 - This device will support level 3.
6729 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
6730#else
Thierry Strudel3d639192016-09-09 11:52:26 -07006731 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006732#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006733
6734 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
6735 &supportedHwLvl, 1);
6736
6737 bool facingBack = false;
6738 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
6739 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
6740 facingBack = true;
6741 }
6742 /*HAL 3 only*/
6743 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
6744 &gCamCapability[cameraId]->min_focus_distance, 1);
6745
6746 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
6747 &gCamCapability[cameraId]->hyper_focal_distance, 1);
6748
6749 /*should be using focal lengths but sensor doesn't provide that info now*/
6750 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
6751 &gCamCapability[cameraId]->focal_length,
6752 1);
6753
6754 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
6755 gCamCapability[cameraId]->apertures,
6756 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
6757
6758 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
6759 gCamCapability[cameraId]->filter_densities,
6760 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
6761
6762
6763 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
6764 (uint8_t *)gCamCapability[cameraId]->optical_stab_modes,
6765 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count));
6766
6767 int32_t lens_shading_map_size[] = {
6768 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
6769 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
6770 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
6771 lens_shading_map_size,
6772 sizeof(lens_shading_map_size)/sizeof(int32_t));
6773
6774 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
6775 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
6776
6777 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
6778 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
6779
6780 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
6781 &gCamCapability[cameraId]->max_frame_duration, 1);
6782
6783 camera_metadata_rational baseGainFactor = {
6784 gCamCapability[cameraId]->base_gain_factor.numerator,
6785 gCamCapability[cameraId]->base_gain_factor.denominator};
6786 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
6787 &baseGainFactor, 1);
6788
6789 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
6790 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
6791
6792 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
6793 gCamCapability[cameraId]->pixel_array_size.height};
6794 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
6795 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
6796
6797 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
6798 gCamCapability[cameraId]->active_array_size.top,
6799 gCamCapability[cameraId]->active_array_size.width,
6800 gCamCapability[cameraId]->active_array_size.height};
6801 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
6802 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
6803
6804 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
6805 &gCamCapability[cameraId]->white_level, 1);
6806
6807 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
6808 gCamCapability[cameraId]->black_level_pattern, BLACK_LEVEL_PATTERN_CNT);
6809
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006810#ifndef USE_HAL_3_3
6811 bool hasBlackRegions = false;
6812 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
6813 LOGW("black_region_count: %d is bounded to %d",
6814 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
6815 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
6816 }
6817 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
6818 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
6819 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
6820 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
6821 }
6822 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
6823 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
6824 hasBlackRegions = true;
6825 }
6826#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07006827 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
6828 &gCamCapability[cameraId]->flash_charge_duration, 1);
6829
6830 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
6831 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
6832
6833 uint8_t timestampSource = ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN;
6834 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
6835 &timestampSource, 1);
6836
6837 staticInfo.update(ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
6838 &gCamCapability[cameraId]->histogram_size, 1);
6839
6840 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
6841 &gCamCapability[cameraId]->max_histogram_count, 1);
6842
6843 int32_t sharpness_map_size[] = {
6844 gCamCapability[cameraId]->sharpness_map_size.width,
6845 gCamCapability[cameraId]->sharpness_map_size.height};
6846
6847 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
6848 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
6849
6850 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
6851 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
6852
6853 int32_t scalar_formats[] = {
6854 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
6855 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
6856 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
6857 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
6858 HAL_PIXEL_FORMAT_RAW10,
6859 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
6860 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(int32_t);
6861 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS,
6862 scalar_formats,
6863 scalar_formats_count);
6864
6865 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
6866 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6867 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
6868 count, MAX_SIZES_CNT, available_processed_sizes);
6869 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
6870 available_processed_sizes, count * 2);
6871
6872 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
6873 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
6874 makeTable(gCamCapability[cameraId]->raw_dim,
6875 count, MAX_SIZES_CNT, available_raw_sizes);
6876 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
6877 available_raw_sizes, count * 2);
6878
6879 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
6880 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
6881 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
6882 count, MAX_SIZES_CNT, available_fps_ranges);
6883 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
6884 available_fps_ranges, count * 2);
6885
6886 camera_metadata_rational exposureCompensationStep = {
6887 gCamCapability[cameraId]->exp_compensation_step.numerator,
6888 gCamCapability[cameraId]->exp_compensation_step.denominator};
6889 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
6890 &exposureCompensationStep, 1);
6891
6892 Vector<uint8_t> availableVstabModes;
6893 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
6894 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006895 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07006896 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006897 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07006898 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006899 count = IS_TYPE_MAX;
6900 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
6901 for (size_t i = 0; i < count; i++) {
6902 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
6903 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
6904 eisSupported = true;
6905 break;
6906 }
6907 }
6908 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006909 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
6910 }
6911 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
6912 availableVstabModes.array(), availableVstabModes.size());
6913
6914 /*HAL 1 and HAL 3 common*/
6915 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
6916 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
6917 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
6918 float maxZoom = maxZoomStep/minZoomStep;
6919 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
6920 &maxZoom, 1);
6921
6922 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
6923 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
6924
6925 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
6926 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
6927 max3aRegions[2] = 0; /* AF not supported */
6928 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
6929 max3aRegions, 3);
6930
6931 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
6932 memset(prop, 0, sizeof(prop));
6933 property_get("persist.camera.facedetect", prop, "1");
6934 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
6935 LOGD("Support face detection mode: %d",
6936 supportedFaceDetectMode);
6937
6938 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07006939 /* support mode should be OFF if max number of face is 0 */
6940 if (maxFaces <= 0) {
6941 supportedFaceDetectMode = 0;
6942 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006943 Vector<uint8_t> availableFaceDetectModes;
6944 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
6945 if (supportedFaceDetectMode == 1) {
6946 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6947 } else if (supportedFaceDetectMode == 2) {
6948 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6949 } else if (supportedFaceDetectMode == 3) {
6950 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
6951 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
6952 } else {
6953 maxFaces = 0;
6954 }
6955 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
6956 availableFaceDetectModes.array(),
6957 availableFaceDetectModes.size());
6958 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
6959 (int32_t *)&maxFaces, 1);
6960
6961 int32_t exposureCompensationRange[] = {
6962 gCamCapability[cameraId]->exposure_compensation_min,
6963 gCamCapability[cameraId]->exposure_compensation_max};
6964 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
6965 exposureCompensationRange,
6966 sizeof(exposureCompensationRange)/sizeof(int32_t));
6967
6968 uint8_t lensFacing = (facingBack) ?
6969 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
6970 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
6971
6972 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
6973 available_thumbnail_sizes,
6974 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
6975
6976 /*all sizes will be clubbed into this tag*/
6977 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
6978 /*android.scaler.availableStreamConfigurations*/
6979 Vector<int32_t> available_stream_configs;
6980 cam_dimension_t active_array_dim;
6981 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
6982 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
6983 /* Add input/output stream configurations for each scalar formats*/
6984 for (size_t j = 0; j < scalar_formats_count; j++) {
6985 switch (scalar_formats[j]) {
6986 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
6987 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
6988 case HAL_PIXEL_FORMAT_RAW10:
6989 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6990 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
6991 addStreamConfig(available_stream_configs, scalar_formats[j],
6992 gCamCapability[cameraId]->raw_dim[i],
6993 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
6994 }
6995 break;
6996 case HAL_PIXEL_FORMAT_BLOB:
6997 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
6998 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
6999 addStreamConfig(available_stream_configs, scalar_formats[j],
7000 gCamCapability[cameraId]->picture_sizes_tbl[i],
7001 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7002 }
7003 break;
7004 case HAL_PIXEL_FORMAT_YCbCr_420_888:
7005 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
7006 default:
7007 cam_dimension_t largest_picture_size;
7008 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
7009 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7010 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7011 addStreamConfig(available_stream_configs, scalar_formats[j],
7012 gCamCapability[cameraId]->picture_sizes_tbl[i],
7013 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
7014 /* Book keep largest */
7015 if (gCamCapability[cameraId]->picture_sizes_tbl[i].width
7016 >= largest_picture_size.width &&
7017 gCamCapability[cameraId]->picture_sizes_tbl[i].height
7018 >= largest_picture_size.height)
7019 largest_picture_size = gCamCapability[cameraId]->picture_sizes_tbl[i];
7020 }
7021 /*For below 2 formats we also support i/p streams for reprocessing advertise those*/
7022 if (scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
7023 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) {
7024 addStreamConfig(available_stream_configs, scalar_formats[j],
7025 largest_picture_size,
7026 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
7027 }
7028 break;
7029 }
7030 }
7031
7032 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7033 available_stream_configs.array(), available_stream_configs.size());
7034 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
7035 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
7036
7037 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7038 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7039
7040 /* android.scaler.availableMinFrameDurations */
7041 Vector<int64_t> available_min_durations;
7042 for (size_t j = 0; j < scalar_formats_count; j++) {
7043 switch (scalar_formats[j]) {
7044 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
7045 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
7046 case HAL_PIXEL_FORMAT_RAW10:
7047 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7048 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7049 available_min_durations.add(scalar_formats[j]);
7050 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7051 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7052 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
7053 }
7054 break;
7055 default:
7056 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7057 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7058 available_min_durations.add(scalar_formats[j]);
7059 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7060 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7061 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
7062 }
7063 break;
7064 }
7065 }
7066 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
7067 available_min_durations.array(), available_min_durations.size());
7068
7069 Vector<int32_t> available_hfr_configs;
7070 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
7071 int32_t fps = 0;
7072 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
7073 case CAM_HFR_MODE_60FPS:
7074 fps = 60;
7075 break;
7076 case CAM_HFR_MODE_90FPS:
7077 fps = 90;
7078 break;
7079 case CAM_HFR_MODE_120FPS:
7080 fps = 120;
7081 break;
7082 case CAM_HFR_MODE_150FPS:
7083 fps = 150;
7084 break;
7085 case CAM_HFR_MODE_180FPS:
7086 fps = 180;
7087 break;
7088 case CAM_HFR_MODE_210FPS:
7089 fps = 210;
7090 break;
7091 case CAM_HFR_MODE_240FPS:
7092 fps = 240;
7093 break;
7094 case CAM_HFR_MODE_480FPS:
7095 fps = 480;
7096 break;
7097 case CAM_HFR_MODE_OFF:
7098 case CAM_HFR_MODE_MAX:
7099 default:
7100 break;
7101 }
7102
7103 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
7104 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
7105 /* For each HFR frame rate, need to advertise one variable fps range
7106 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
7107 * and [120, 120]. While camcorder preview alone is running [30, 120] is
7108 * set by the app. When video recording is started, [120, 120] is
7109 * set. This way sensor configuration does not change when recording
7110 * is started */
7111
7112 /* (width, height, fps_min, fps_max, batch_size_max) */
7113 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
7114 j < MAX_SIZES_CNT; j++) {
7115 available_hfr_configs.add(
7116 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7117 available_hfr_configs.add(
7118 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7119 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
7120 available_hfr_configs.add(fps);
7121 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7122
7123 /* (width, height, fps_min, fps_max, batch_size_max) */
7124 available_hfr_configs.add(
7125 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
7126 available_hfr_configs.add(
7127 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
7128 available_hfr_configs.add(fps);
7129 available_hfr_configs.add(fps);
7130 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
7131 }
7132 }
7133 }
7134 //Advertise HFR capability only if the property is set
7135 memset(prop, 0, sizeof(prop));
7136 property_get("persist.camera.hal3hfr.enable", prop, "1");
7137 uint8_t hfrEnable = (uint8_t)atoi(prop);
7138
7139 if(hfrEnable && available_hfr_configs.array()) {
7140 staticInfo.update(
7141 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
7142 available_hfr_configs.array(), available_hfr_configs.size());
7143 }
7144
7145 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
7146 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
7147 &max_jpeg_size, 1);
7148
7149 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
7150 size_t size = 0;
7151 count = CAM_EFFECT_MODE_MAX;
7152 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
7153 for (size_t i = 0; i < count; i++) {
7154 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7155 gCamCapability[cameraId]->supported_effects[i]);
7156 if (NAME_NOT_FOUND != val) {
7157 avail_effects[size] = (uint8_t)val;
7158 size++;
7159 }
7160 }
7161 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
7162 avail_effects,
7163 size);
7164
7165 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
7166 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
7167 size_t supported_scene_modes_cnt = 0;
7168 count = CAM_SCENE_MODE_MAX;
7169 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
7170 for (size_t i = 0; i < count; i++) {
7171 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
7172 CAM_SCENE_MODE_OFF) {
7173 int val = lookupFwkName(SCENE_MODES_MAP,
7174 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7175 gCamCapability[cameraId]->supported_scene_modes[i]);
7176 if (NAME_NOT_FOUND != val) {
7177 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
7178 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
7179 supported_scene_modes_cnt++;
7180 }
7181 }
7182 }
7183 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7184 avail_scene_modes,
7185 supported_scene_modes_cnt);
7186
7187 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
7188 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
7189 supported_scene_modes_cnt,
7190 CAM_SCENE_MODE_MAX,
7191 scene_mode_overrides,
7192 supported_indexes,
7193 cameraId);
7194
7195 if (supported_scene_modes_cnt == 0) {
7196 supported_scene_modes_cnt = 1;
7197 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
7198 }
7199
7200 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
7201 scene_mode_overrides, supported_scene_modes_cnt * 3);
7202
7203 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
7204 ANDROID_CONTROL_MODE_AUTO,
7205 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
7206 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
7207 available_control_modes,
7208 3);
7209
7210 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
7211 size = 0;
7212 count = CAM_ANTIBANDING_MODE_MAX;
7213 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
7214 for (size_t i = 0; i < count; i++) {
7215 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
7216 gCamCapability[cameraId]->supported_antibandings[i]);
7217 if (NAME_NOT_FOUND != val) {
7218 avail_antibanding_modes[size] = (uint8_t)val;
7219 size++;
7220 }
7221
7222 }
7223 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
7224 avail_antibanding_modes,
7225 size);
7226
7227 uint8_t avail_abberation_modes[] = {
7228 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
7229 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
7230 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
7231 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
7232 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
7233 if (0 == count) {
7234 // If no aberration correction modes are available for a device, this advertise OFF mode
7235 size = 1;
7236 } else {
7237 // If count is not zero then atleast one among the FAST or HIGH quality is supported
7238 // So, advertize all 3 modes if atleast any one mode is supported as per the
7239 // new M requirement
7240 size = 3;
7241 }
7242 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7243 avail_abberation_modes,
7244 size);
7245
7246 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
7247 size = 0;
7248 count = CAM_FOCUS_MODE_MAX;
7249 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
7250 for (size_t i = 0; i < count; i++) {
7251 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7252 gCamCapability[cameraId]->supported_focus_modes[i]);
7253 if (NAME_NOT_FOUND != val) {
7254 avail_af_modes[size] = (uint8_t)val;
7255 size++;
7256 }
7257 }
7258 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
7259 avail_af_modes,
7260 size);
7261
7262 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
7263 size = 0;
7264 count = CAM_WB_MODE_MAX;
7265 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
7266 for (size_t i = 0; i < count; i++) {
7267 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7268 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7269 gCamCapability[cameraId]->supported_white_balances[i]);
7270 if (NAME_NOT_FOUND != val) {
7271 avail_awb_modes[size] = (uint8_t)val;
7272 size++;
7273 }
7274 }
7275 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
7276 avail_awb_modes,
7277 size);
7278
7279 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
7280 count = CAM_FLASH_FIRING_LEVEL_MAX;
7281 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
7282 count);
7283 for (size_t i = 0; i < count; i++) {
7284 available_flash_levels[i] =
7285 gCamCapability[cameraId]->supported_firing_levels[i];
7286 }
7287 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
7288 available_flash_levels, count);
7289
7290 uint8_t flashAvailable;
7291 if (gCamCapability[cameraId]->flash_available)
7292 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
7293 else
7294 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
7295 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
7296 &flashAvailable, 1);
7297
7298 Vector<uint8_t> avail_ae_modes;
7299 count = CAM_AE_MODE_MAX;
7300 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
7301 for (size_t i = 0; i < count; i++) {
7302 avail_ae_modes.add(gCamCapability[cameraId]->supported_ae_modes[i]);
7303 }
7304 if (flashAvailable) {
7305 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
7306 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
7307 }
7308 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
7309 avail_ae_modes.array(),
7310 avail_ae_modes.size());
7311
7312 int32_t sensitivity_range[2];
7313 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
7314 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
7315 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
7316 sensitivity_range,
7317 sizeof(sensitivity_range) / sizeof(int32_t));
7318
7319 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7320 &gCamCapability[cameraId]->max_analog_sensitivity,
7321 1);
7322
7323 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
7324 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
7325 &sensor_orientation,
7326 1);
7327
7328 int32_t max_output_streams[] = {
7329 MAX_STALLING_STREAMS,
7330 MAX_PROCESSED_STREAMS,
7331 MAX_RAW_STREAMS};
7332 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
7333 max_output_streams,
7334 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
7335
7336 uint8_t avail_leds = 0;
7337 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
7338 &avail_leds, 0);
7339
7340 uint8_t focus_dist_calibrated;
7341 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
7342 gCamCapability[cameraId]->focus_dist_calibrated);
7343 if (NAME_NOT_FOUND != val) {
7344 focus_dist_calibrated = (uint8_t)val;
7345 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7346 &focus_dist_calibrated, 1);
7347 }
7348
7349 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
7350 size = 0;
7351 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
7352 MAX_TEST_PATTERN_CNT);
7353 for (size_t i = 0; i < count; i++) {
7354 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
7355 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
7356 if (NAME_NOT_FOUND != testpatternMode) {
7357 avail_testpattern_modes[size] = testpatternMode;
7358 size++;
7359 }
7360 }
7361 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7362 avail_testpattern_modes,
7363 size);
7364
7365 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
7366 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
7367 &max_pipeline_depth,
7368 1);
7369
7370 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
7371 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7372 &partial_result_count,
7373 1);
7374
7375 int32_t max_stall_duration = MAX_REPROCESS_STALL;
7376 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
7377
7378 Vector<uint8_t> available_capabilities;
7379 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
7380 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
7381 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
7382 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
7383 if (supportBurst) {
7384 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
7385 }
7386 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
7387 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
7388 if (hfrEnable && available_hfr_configs.array()) {
7389 available_capabilities.add(
7390 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
7391 }
7392
7393 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
7394 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
7395 }
7396 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7397 available_capabilities.array(),
7398 available_capabilities.size());
7399
7400 //aeLockAvailable to be set to true if capabilities has MANUAL_SENSOR or BURST_CAPTURE
7401 //Assumption is that all bayer cameras support MANUAL_SENSOR.
7402 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7403 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
7404
7405 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7406 &aeLockAvailable, 1);
7407
7408 //awbLockAvailable to be set to true if capabilities has MANUAL_POST_PROCESSING or
7409 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
7410 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
7411 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
7412
7413 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7414 &awbLockAvailable, 1);
7415
7416 int32_t max_input_streams = 1;
7417 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7418 &max_input_streams,
7419 1);
7420
7421 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
7422 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
7423 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
7424 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
7425 HAL_PIXEL_FORMAT_YCbCr_420_888};
7426 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7427 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
7428
7429 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
7430 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
7431 &max_latency,
7432 1);
7433
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007434#ifndef USE_HAL_3_3
7435 int32_t isp_sensitivity_range[2];
7436 isp_sensitivity_range[0] =
7437 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
7438 isp_sensitivity_range[1] =
7439 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
7440 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7441 isp_sensitivity_range,
7442 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
7443#endif
7444
Thierry Strudel3d639192016-09-09 11:52:26 -07007445 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
7446 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
7447 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7448 available_hot_pixel_modes,
7449 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
7450
7451 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
7452 ANDROID_SHADING_MODE_FAST,
7453 ANDROID_SHADING_MODE_HIGH_QUALITY};
7454 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
7455 available_shading_modes,
7456 3);
7457
7458 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
7459 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
7460 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7461 available_lens_shading_map_modes,
7462 2);
7463
7464 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
7465 ANDROID_EDGE_MODE_FAST,
7466 ANDROID_EDGE_MODE_HIGH_QUALITY,
7467 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
7468 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7469 available_edge_modes,
7470 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
7471
7472 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
7473 ANDROID_NOISE_REDUCTION_MODE_FAST,
7474 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
7475 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
7476 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
7477 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7478 available_noise_red_modes,
7479 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
7480
7481 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
7482 ANDROID_TONEMAP_MODE_FAST,
7483 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
7484 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7485 available_tonemap_modes,
7486 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
7487
7488 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
7489 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7490 available_hot_pixel_map_modes,
7491 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
7492
7493 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7494 gCamCapability[cameraId]->reference_illuminant1);
7495 if (NAME_NOT_FOUND != val) {
7496 uint8_t fwkReferenceIlluminant = (uint8_t)val;
7497 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
7498 }
7499
7500 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
7501 gCamCapability[cameraId]->reference_illuminant2);
7502 if (NAME_NOT_FOUND != val) {
7503 uint8_t fwkReferenceIlluminant = (uint8_t)val;
7504 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
7505 }
7506
7507 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
7508 (void *)gCamCapability[cameraId]->forward_matrix1,
7509 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7510
7511 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
7512 (void *)gCamCapability[cameraId]->forward_matrix2,
7513 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
7514
7515 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
7516 (void *)gCamCapability[cameraId]->color_transform1,
7517 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7518
7519 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
7520 (void *)gCamCapability[cameraId]->color_transform2,
7521 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
7522
7523 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
7524 (void *)gCamCapability[cameraId]->calibration_transform1,
7525 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7526
7527 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
7528 (void *)gCamCapability[cameraId]->calibration_transform2,
7529 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
7530
7531 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
7532 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
7533 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
7534 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
7535 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
7536 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7537 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
7538 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
7539 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
7540 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
7541 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
7542 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
7543 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7544 ANDROID_JPEG_GPS_COORDINATES,
7545 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
7546 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
7547 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
7548 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7549 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
7550 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
7551 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
7552 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
7553 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
7554 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007555#ifndef USE_HAL_3_3
7556 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7557#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007558 ANDROID_STATISTICS_FACE_DETECT_MODE,
7559 ANDROID_STATISTICS_HISTOGRAM_MODE, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7560 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
7561 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7562 ANDROID_BLACK_LEVEL_LOCK };
7563
7564 size_t request_keys_cnt =
7565 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
7566 Vector<int32_t> available_request_keys;
7567 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
7568 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7569 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
7570 }
7571
7572 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
7573 available_request_keys.array(), available_request_keys.size());
7574
7575 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
7576 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
7577 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
7578 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
7579 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
7580 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
7581 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
7582 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
7583 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
7584 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
7585 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
7586 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
7587 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
7588 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
7589 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7590 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
7591 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
7592 ANDROID_STATISTICS_FACE_DETECT_MODE, ANDROID_STATISTICS_HISTOGRAM_MODE,
7593 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
7594 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7595 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007596 ANDROID_STATISTICS_FACE_SCORES,
7597#ifndef USE_HAL_3_3
7598 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
7599#endif
7600 };
7601
Thierry Strudel3d639192016-09-09 11:52:26 -07007602 size_t result_keys_cnt =
7603 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
7604
7605 Vector<int32_t> available_result_keys;
7606 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
7607 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
7608 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
7609 }
7610 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
7611 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
7612 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
7613 }
7614 if (supportedFaceDetectMode == 1) {
7615 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
7616 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
7617 } else if ((supportedFaceDetectMode == 2) ||
7618 (supportedFaceDetectMode == 3)) {
7619 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
7620 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
7621 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007622#ifndef USE_HAL_3_3
7623 if (hasBlackRegions) {
7624 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
7625 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
7626 }
7627#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007628 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7629 available_result_keys.array(), available_result_keys.size());
7630
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007631 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -07007632 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
7633 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
7634 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
7635 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
7636 ANDROID_SCALER_CROPPING_TYPE,
7637 ANDROID_SYNC_MAX_LATENCY,
7638 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
7639 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
7640 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
7641 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
7642 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
7643 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
7644 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
7645 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
7646 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
7647 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
7648 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
7649 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
7650 ANDROID_LENS_FACING,
7651 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
7652 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
7653 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
7654 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
7655 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
7656 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
7657 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
7658 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
7659 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
7660 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
7661 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
7662 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
7663 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
7664 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
7665 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
7666 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
7667 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
7668 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
7669 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
7670 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
7671 ANDROID_STATISTICS_INFO_HISTOGRAM_BUCKET_COUNT,
7672 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT, ANDROID_STATISTICS_INFO_MAX_HISTOGRAM_COUNT,
7673 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
7674 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
7675 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
7676 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
7677 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
7678 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
7679 ANDROID_TONEMAP_MAX_CURVE_POINTS,
7680 ANDROID_CONTROL_AVAILABLE_MODES,
7681 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
7682 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
7683 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
7684 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007685 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
7686#ifndef USE_HAL_3_3
7687 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
7688 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
7689#endif
7690 };
7691
7692 Vector<int32_t> available_characteristics_keys;
7693 available_characteristics_keys.appendArray(characteristics_keys_basic,
7694 sizeof(characteristics_keys_basic)/sizeof(int32_t));
7695#ifndef USE_HAL_3_3
7696 if (hasBlackRegions) {
7697 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
7698 }
7699#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007700 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007701 available_characteristics_keys.array(),
7702 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -07007703
7704 /*available stall durations depend on the hw + sw and will be different for different devices */
7705 /*have to add for raw after implementation*/
7706 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
7707 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
7708
7709 Vector<int64_t> available_stall_durations;
7710 for (uint32_t j = 0; j < stall_formats_count; j++) {
7711 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
7712 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7713 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
7714 available_stall_durations.add(stall_formats[j]);
7715 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
7716 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
7717 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
7718 }
7719 } else {
7720 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
7721 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7722 available_stall_durations.add(stall_formats[j]);
7723 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
7724 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
7725 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
7726 }
7727 }
7728 }
7729 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
7730 available_stall_durations.array(),
7731 available_stall_durations.size());
7732
7733 //QCAMERA3_OPAQUE_RAW
7734 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7735 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7736 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
7737 case LEGACY_RAW:
7738 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7739 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
7740 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7741 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
7742 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7743 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
7744 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
7745 break;
7746 case MIPI_RAW:
7747 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
7748 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
7749 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
7750 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
7751 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
7752 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
7753 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
7754 break;
7755 default:
7756 LOGE("unknown opaque_raw_format %d",
7757 gCamCapability[cameraId]->opaque_raw_fmt);
7758 break;
7759 }
7760 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
7761
7762 Vector<int32_t> strides;
7763 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7764 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7765 cam_stream_buf_plane_info_t buf_planes;
7766 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
7767 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
7768 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7769 &gCamCapability[cameraId]->padding_info, &buf_planes);
7770 strides.add(buf_planes.plane_info.mp[0].stride);
7771 }
7772 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
7773 strides.size());
7774
Thierry Strudel04e026f2016-10-10 11:27:36 -07007775 //Video HDR default
7776 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
7777 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
7778 CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
7779 int32_t vhdr_mode[] = {
7780 QCAMERA3_VIDEO_HDR_MODE_OFF,
7781 QCAMERA3_VIDEO_HDR_MODE_ON};
7782
7783 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
7784 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
7785 vhdr_mode, vhdr_mode_count);
7786 }
7787
Thierry Strudel3d639192016-09-09 11:52:26 -07007788 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
7789 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
7790 sizeof(gCamCapability[cameraId]->related_cam_calibration));
7791
7792 uint8_t isMonoOnly =
7793 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
7794 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
7795 &isMonoOnly, 1);
7796
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007797#ifndef USE_HAL_3_3
7798 Vector<int32_t> opaque_size;
7799 for (size_t j = 0; j < scalar_formats_count; j++) {
7800 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
7801 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
7802 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
7803 cam_stream_buf_plane_info_t buf_planes;
7804
7805 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
7806 &gCamCapability[cameraId]->padding_info, &buf_planes);
7807
7808 if (rc == 0) {
7809 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
7810 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
7811 opaque_size.add(buf_planes.plane_info.frame_len);
7812 }else {
7813 LOGE("raw frame calculation failed!");
7814 }
7815 }
7816 }
7817 }
7818
7819 if ((opaque_size.size() > 0) &&
7820 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
7821 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
7822 else
7823 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
7824#endif
7825
Thierry Strudel04e026f2016-10-10 11:27:36 -07007826 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
7827 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
7828 size = 0;
7829 count = CAM_IR_MODE_MAX;
7830 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
7831 for (size_t i = 0; i < count; i++) {
7832 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
7833 gCamCapability[cameraId]->supported_ir_modes[i]);
7834 if (NAME_NOT_FOUND != val) {
7835 avail_ir_modes[size] = (int32_t)val;
7836 size++;
7837 }
7838 }
7839 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
7840 avail_ir_modes, size);
7841 }
7842
Thierry Strudel3d639192016-09-09 11:52:26 -07007843 gStaticMetadata[cameraId] = staticInfo.release();
7844 return rc;
7845}
7846
7847/*===========================================================================
7848 * FUNCTION : makeTable
7849 *
7850 * DESCRIPTION: make a table of sizes
7851 *
7852 * PARAMETERS :
7853 *
7854 *
7855 *==========================================================================*/
7856void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
7857 size_t max_size, int32_t *sizeTable)
7858{
7859 size_t j = 0;
7860 if (size > max_size) {
7861 size = max_size;
7862 }
7863 for (size_t i = 0; i < size; i++) {
7864 sizeTable[j] = dimTable[i].width;
7865 sizeTable[j+1] = dimTable[i].height;
7866 j+=2;
7867 }
7868}
7869
7870/*===========================================================================
7871 * FUNCTION : makeFPSTable
7872 *
7873 * DESCRIPTION: make a table of fps ranges
7874 *
7875 * PARAMETERS :
7876 *
7877 *==========================================================================*/
7878void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
7879 size_t max_size, int32_t *fpsRangesTable)
7880{
7881 size_t j = 0;
7882 if (size > max_size) {
7883 size = max_size;
7884 }
7885 for (size_t i = 0; i < size; i++) {
7886 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
7887 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
7888 j+=2;
7889 }
7890}
7891
7892/*===========================================================================
7893 * FUNCTION : makeOverridesList
7894 *
7895 * DESCRIPTION: make a list of scene mode overrides
7896 *
7897 * PARAMETERS :
7898 *
7899 *
7900 *==========================================================================*/
7901void QCamera3HardwareInterface::makeOverridesList(
7902 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
7903 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
7904{
7905 /*daemon will give a list of overrides for all scene modes.
7906 However we should send the fwk only the overrides for the scene modes
7907 supported by the framework*/
7908 size_t j = 0;
7909 if (size > max_size) {
7910 size = max_size;
7911 }
7912 size_t focus_count = CAM_FOCUS_MODE_MAX;
7913 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
7914 focus_count);
7915 for (size_t i = 0; i < size; i++) {
7916 bool supt = false;
7917 size_t index = supported_indexes[i];
7918 overridesList[j] = gCamCapability[camera_id]->flash_available ?
7919 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
7920 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
7921 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
7922 overridesTable[index].awb_mode);
7923 if (NAME_NOT_FOUND != val) {
7924 overridesList[j+1] = (uint8_t)val;
7925 }
7926 uint8_t focus_override = overridesTable[index].af_mode;
7927 for (size_t k = 0; k < focus_count; k++) {
7928 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
7929 supt = true;
7930 break;
7931 }
7932 }
7933 if (supt) {
7934 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
7935 focus_override);
7936 if (NAME_NOT_FOUND != val) {
7937 overridesList[j+2] = (uint8_t)val;
7938 }
7939 } else {
7940 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
7941 }
7942 j+=3;
7943 }
7944}
7945
7946/*===========================================================================
7947 * FUNCTION : filterJpegSizes
7948 *
7949 * DESCRIPTION: Returns the supported jpeg sizes based on the max dimension that
7950 * could be downscaled to
7951 *
7952 * PARAMETERS :
7953 *
7954 * RETURN : length of jpegSizes array
7955 *==========================================================================*/
7956
7957size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
7958 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
7959 uint8_t downscale_factor)
7960{
7961 if (0 == downscale_factor) {
7962 downscale_factor = 1;
7963 }
7964
7965 int32_t min_width = active_array_size.width / downscale_factor;
7966 int32_t min_height = active_array_size.height / downscale_factor;
7967 size_t jpegSizesCnt = 0;
7968 if (processedSizesCnt > maxCount) {
7969 processedSizesCnt = maxCount;
7970 }
7971 for (size_t i = 0; i < processedSizesCnt; i+=2) {
7972 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
7973 jpegSizes[jpegSizesCnt] = processedSizes[i];
7974 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
7975 jpegSizesCnt += 2;
7976 }
7977 }
7978 return jpegSizesCnt;
7979}
7980
7981/*===========================================================================
7982 * FUNCTION : computeNoiseModelEntryS
7983 *
7984 * DESCRIPTION: function to map a given sensitivity to the S noise
7985 * model parameters in the DNG noise model.
7986 *
7987 * PARAMETERS : sens : the sensor sensitivity
7988 *
7989 ** RETURN : S (sensor amplification) noise
7990 *
7991 *==========================================================================*/
7992double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
7993 double s = gCamCapability[mCameraId]->gradient_S * sens +
7994 gCamCapability[mCameraId]->offset_S;
7995 return ((s < 0.0) ? 0.0 : s);
7996}
7997
7998/*===========================================================================
7999 * FUNCTION : computeNoiseModelEntryO
8000 *
8001 * DESCRIPTION: function to map a given sensitivity to the O noise
8002 * model parameters in the DNG noise model.
8003 *
8004 * PARAMETERS : sens : the sensor sensitivity
8005 *
8006 ** RETURN : O (sensor readout) noise
8007 *
8008 *==========================================================================*/
8009double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
8010 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
8011 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
8012 1.0 : (1.0 * sens / max_analog_sens);
8013 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
8014 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
8015 return ((o < 0.0) ? 0.0 : o);
8016}
8017
8018/*===========================================================================
8019 * FUNCTION : getSensorSensitivity
8020 *
8021 * DESCRIPTION: convert iso_mode to an integer value
8022 *
8023 * PARAMETERS : iso_mode : the iso_mode supported by sensor
8024 *
8025 ** RETURN : sensitivity supported by sensor
8026 *
8027 *==========================================================================*/
8028int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
8029{
8030 int32_t sensitivity;
8031
8032 switch (iso_mode) {
8033 case CAM_ISO_MODE_100:
8034 sensitivity = 100;
8035 break;
8036 case CAM_ISO_MODE_200:
8037 sensitivity = 200;
8038 break;
8039 case CAM_ISO_MODE_400:
8040 sensitivity = 400;
8041 break;
8042 case CAM_ISO_MODE_800:
8043 sensitivity = 800;
8044 break;
8045 case CAM_ISO_MODE_1600:
8046 sensitivity = 1600;
8047 break;
8048 default:
8049 sensitivity = -1;
8050 break;
8051 }
8052 return sensitivity;
8053}
8054
/*===========================================================================
 * FUNCTION   : getCamInfo
 *
 * DESCRIPTION: query camera capabilities and fill the framework's
 *              camera_info struct (facing, orientation, HAL device version,
 *              static metadata, resource cost). Lazily initializes the
 *              per-camera capability and static-metadata caches under
 *              gCamLock on first call.
 *
 * PARAMETERS :
 *   @cameraId : camera Id
 *   @info     : camera info struct to be filled in with camera capabilities
 *
 * RETURN     : int type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
        struct camera_info *info)
{
    ATRACE_CALL();
    int rc = 0;

    // gCamLock guards the lazily-built gCamCapability / gStaticMetadata
    // caches; every early return below must unlock first.
    pthread_mutex_lock(&gCamLock);
    if (NULL == gCamCapability[cameraId]) {
        rc = initCapabilities(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    if (NULL == gStaticMetadata[cameraId]) {
        rc = initStaticMetadata(cameraId);
        if (rc < 0) {
            pthread_mutex_unlock(&gCamLock);
            return rc;
        }
    }

    // Map the HAL mount position (including aux sensors on dual-camera
    // modules) onto the framework's two-valued facing field.
    switch(gCamCapability[cameraId]->position) {
    case CAM_POSITION_BACK:
    case CAM_POSITION_BACK_AUX:
        info->facing = CAMERA_FACING_BACK;
        break;

    case CAM_POSITION_FRONT:
    case CAM_POSITION_FRONT_AUX:
        info->facing = CAMERA_FACING_FRONT;
        break;

    default:
        // Unknown position: flag the error but still fill the remaining
        // fields before returning rc = -1.
        LOGE("Unknown position type %d for camera id:%d",
                gCamCapability[cameraId]->position, cameraId);
        rc = -1;
        break;
    }


    info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
    // HAL device version is fixed at build time by the USE_HAL_3_3 flag.
#ifndef USE_HAL_3_3
    info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    info->static_camera_characteristics = gStaticMetadata[cameraId];

    //For now assume both cameras can operate independently.
    info->conflicting_devices = NULL;
    info->conflicting_devices_length = 0;

    //resource cost is 100 * MIN(1.0, m/M),
    //where m is throughput requirement with maximum stream configuration
    //and M is CPP maximum throughput.
    float max_fps = 0.0;
    for (uint32_t i = 0;
            i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
        if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
            max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
    }
    // m = worst-case pixel throughput: all processed streams at full active
    // array resolution at the highest advertised fps.
    float ratio = 1.0 * MAX_PROCESSED_STREAMS *
            gCamCapability[cameraId]->active_array_size.width *
            gCamCapability[cameraId]->active_array_size.height * max_fps /
            gCamCapability[cameraId]->max_pixel_bandwidth;
    info->resource_cost = 100 * MIN(1.0, ratio);
    LOGI("camera %d resource cost is %d", cameraId,
            info->resource_cost);

    pthread_mutex_unlock(&gCamLock);
    return rc;
}
8142
8143/*===========================================================================
8144 * FUNCTION : translateCapabilityToMetadata
8145 *
8146 * DESCRIPTION: translate the capability into camera_metadata_t
8147 *
8148 * PARAMETERS : type of the request
8149 *
8150 *
8151 * RETURN : success: camera_metadata_t*
8152 * failure: NULL
8153 *
8154 *==========================================================================*/
8155camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
8156{
8157 if (mDefaultMetadata[type] != NULL) {
8158 return mDefaultMetadata[type];
8159 }
8160 //first time we are handling this request
8161 //fill up the metadata structure using the wrapper class
8162 CameraMetadata settings;
8163 //translate from cam_capability_t to camera_metadata_tag_t
8164 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
8165 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
8166 int32_t defaultRequestID = 0;
8167 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
8168
8169 /* OIS disable */
8170 char ois_prop[PROPERTY_VALUE_MAX];
8171 memset(ois_prop, 0, sizeof(ois_prop));
8172 property_get("persist.camera.ois.disable", ois_prop, "0");
8173 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
8174
8175 /* Force video to use OIS */
8176 char videoOisProp[PROPERTY_VALUE_MAX];
8177 memset(videoOisProp, 0, sizeof(videoOisProp));
8178 property_get("persist.camera.ois.video", videoOisProp, "1");
8179 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Thierry Strudel3d639192016-09-09 11:52:26 -07008180 uint8_t controlIntent = 0;
8181 uint8_t focusMode;
8182 uint8_t vsMode;
8183 uint8_t optStabMode;
8184 uint8_t cacMode;
8185 uint8_t edge_mode;
8186 uint8_t noise_red_mode;
8187 uint8_t tonemap_mode;
8188 bool highQualityModeEntryAvailable = FALSE;
8189 bool fastModeEntryAvailable = FALSE;
8190 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
8191 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8192 switch (type) {
8193 case CAMERA3_TEMPLATE_PREVIEW:
8194 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
8195 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8196 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8197 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8198 edge_mode = ANDROID_EDGE_MODE_FAST;
8199 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8200 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8201 break;
8202 case CAMERA3_TEMPLATE_STILL_CAPTURE:
8203 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
8204 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8205 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8206 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
8207 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
8208 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
8209 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8210 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
8211 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
8212 if (gCamCapability[mCameraId]->aberration_modes[i] ==
8213 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
8214 highQualityModeEntryAvailable = TRUE;
8215 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
8216 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
8217 fastModeEntryAvailable = TRUE;
8218 }
8219 }
8220 if (highQualityModeEntryAvailable) {
8221 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
8222 } else if (fastModeEntryAvailable) {
8223 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8224 }
8225 break;
8226 case CAMERA3_TEMPLATE_VIDEO_RECORD:
8227 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
8228 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8229 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07008230 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8231 edge_mode = ANDROID_EDGE_MODE_FAST;
8232 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8233 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8234 if (forceVideoOis)
8235 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8236 break;
8237 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
8238 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
8239 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
8240 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -07008241 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8242 edge_mode = ANDROID_EDGE_MODE_FAST;
8243 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8244 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8245 if (forceVideoOis)
8246 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8247 break;
8248 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
8249 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
8250 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8251 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8252 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8253 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
8254 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
8255 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8256 break;
8257 case CAMERA3_TEMPLATE_MANUAL:
8258 edge_mode = ANDROID_EDGE_MODE_FAST;
8259 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8260 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8261 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8262 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
8263 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8264 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8265 break;
8266 default:
8267 edge_mode = ANDROID_EDGE_MODE_FAST;
8268 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
8269 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
8270 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
8271 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
8272 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
8273 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8274 break;
8275 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07008276 // Set CAC to OFF if underlying device doesn't support
8277 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8278 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8279 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008280 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
8281 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
8282 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
8283 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
8284 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
8285 }
8286 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
8287
8288 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8289 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
8290 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
8291 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
8292 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
8293 || ois_disable)
8294 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
8295 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
8296
8297 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
8298 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
8299
8300 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
8301 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
8302
8303 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
8304 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
8305
8306 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
8307 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
8308
8309 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
8310 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
8311
8312 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
8313 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
8314
8315 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
8316 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
8317
8318 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
8319 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
8320
8321 /*flash*/
8322 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
8323 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
8324
8325 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
8326 settings.update(ANDROID_FLASH_FIRING_POWER,
8327 &flashFiringLevel, 1);
8328
8329 /* lens */
8330 float default_aperture = gCamCapability[mCameraId]->apertures[0];
8331 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
8332
8333 if (gCamCapability[mCameraId]->filter_densities_count) {
8334 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
8335 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
8336 gCamCapability[mCameraId]->filter_densities_count);
8337 }
8338
8339 float default_focal_length = gCamCapability[mCameraId]->focal_length;
8340 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
8341
8342 if (focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
8343 float default_focus_distance = 0;
8344 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &default_focus_distance, 1);
8345 }
8346
8347 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
8348 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
8349
8350 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
8351 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
8352
8353 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
8354 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
8355
8356 /* face detection (default to OFF) */
8357 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
8358 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
8359
8360 static const uint8_t histogramMode = ANDROID_STATISTICS_HISTOGRAM_MODE_OFF;
8361 settings.update(ANDROID_STATISTICS_HISTOGRAM_MODE, &histogramMode, 1);
8362
8363 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
8364 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
8365
8366 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
8367 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
8368
8369 static const uint8_t lensShadingMode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8370 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &lensShadingMode, 1);
8371
8372 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8373 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
8374
8375 /* Exposure time(Update the Min Exposure Time)*/
8376 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
8377 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
8378
8379 /* frame duration */
8380 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
8381 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
8382
8383 /* sensitivity */
8384 static const int32_t default_sensitivity = 100;
8385 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008386#ifndef USE_HAL_3_3
8387 static const int32_t default_isp_sensitivity =
8388 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
8389 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
8390#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07008391
8392 /*edge mode*/
8393 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
8394
8395 /*noise reduction mode*/
8396 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
8397
8398 /*color correction mode*/
8399 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
8400 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
8401
8402 /*transform matrix mode*/
8403 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
8404
8405 int32_t scaler_crop_region[4];
8406 scaler_crop_region[0] = 0;
8407 scaler_crop_region[1] = 0;
8408 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
8409 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
8410 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
8411
8412 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
8413 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
8414
8415 /*focus distance*/
8416 float focus_distance = 0.0;
8417 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
8418
8419 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
8420 float max_range = 0.0;
8421 float max_fixed_fps = 0.0;
8422 int32_t fps_range[2] = {0, 0};
8423 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
8424 i++) {
8425 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
8426 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8427 if (type == CAMERA3_TEMPLATE_PREVIEW ||
8428 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
8429 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
8430 if (range > max_range) {
8431 fps_range[0] =
8432 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8433 fps_range[1] =
8434 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8435 max_range = range;
8436 }
8437 } else {
8438 if (range < 0.01 && max_fixed_fps <
8439 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
8440 fps_range[0] =
8441 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
8442 fps_range[1] =
8443 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8444 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
8445 }
8446 }
8447 }
8448 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
8449
8450 /*precapture trigger*/
8451 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
8452 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
8453
8454 /*af trigger*/
8455 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
8456 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
8457
8458 /* ae & af regions */
8459 int32_t active_region[] = {
8460 gCamCapability[mCameraId]->active_array_size.left,
8461 gCamCapability[mCameraId]->active_array_size.top,
8462 gCamCapability[mCameraId]->active_array_size.left +
8463 gCamCapability[mCameraId]->active_array_size.width,
8464 gCamCapability[mCameraId]->active_array_size.top +
8465 gCamCapability[mCameraId]->active_array_size.height,
8466 0};
8467 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
8468 sizeof(active_region) / sizeof(active_region[0]));
8469 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
8470 sizeof(active_region) / sizeof(active_region[0]));
8471
8472 /* black level lock */
8473 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
8474 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
8475
8476 /* lens shading map mode */
8477 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
8478 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
8479 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
8480 }
8481 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
8482
8483 //special defaults for manual template
8484 if (type == CAMERA3_TEMPLATE_MANUAL) {
8485 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
8486 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
8487
8488 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
8489 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
8490
8491 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
8492 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
8493
8494 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
8495 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
8496
8497 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
8498 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
8499
8500 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
8501 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
8502 }
8503
8504
8505 /* TNR
8506 * We'll use this location to determine which modes TNR will be set.
8507 * We will enable TNR to be on if either of the Preview/Video stream requires TNR
8508 * This is not to be confused with linking on a per stream basis that decision
8509 * is still on per-session basis and will be handled as part of config stream
8510 */
8511 uint8_t tnr_enable = 0;
8512
8513 if (m_bTnrPreview || m_bTnrVideo) {
8514
8515 switch (type) {
8516 case CAMERA3_TEMPLATE_VIDEO_RECORD:
8517 tnr_enable = 1;
8518 break;
8519
8520 default:
8521 tnr_enable = 0;
8522 break;
8523 }
8524
8525 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
8526 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8527 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8528
8529 LOGD("TNR:%d with process plate %d for template:%d",
8530 tnr_enable, tnr_process_type, type);
8531 }
8532
8533 //Update Link tags to default
8534 int32_t sync_type = CAM_TYPE_STANDALONE;
8535 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
8536
8537 int32_t is_main = 0; //this doesn't matter as app should overwrite
8538 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
8539
8540 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &is_main, 1);
8541
8542 /* CDS default */
8543 char prop[PROPERTY_VALUE_MAX];
8544 memset(prop, 0, sizeof(prop));
8545 property_get("persist.camera.CDS", prop, "Auto");
8546 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
8547 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
8548 if (CAM_CDS_MODE_MAX == cds_mode) {
8549 cds_mode = CAM_CDS_MODE_AUTO;
8550 }
8551
8552 /* Disabling CDS in templates which have TNR enabled*/
8553 if (tnr_enable)
8554 cds_mode = CAM_CDS_MODE_OFF;
8555
8556 int32_t mode = cds_mode;
8557 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07008558
8559 int32_t hdr_mode = (int32_t)QCAMERA3_VIDEO_HDR_MODE_OFF;
8560 settings.update(QCAMERA3_VIDEO_HDR_MODE, &hdr_mode, 1);
8561
8562 /* IR Mode Default Off */
8563 int32_t ir_mode = (int32_t)QCAMERA3_IR_MODE_OFF;
8564 settings.update(QCAMERA3_IR_MODE, &ir_mode, 1);
8565
Thierry Strudel3d639192016-09-09 11:52:26 -07008566 mDefaultMetadata[type] = settings.release();
8567
8568 return mDefaultMetadata[type];
8569}
8570
8571/*===========================================================================
8572 * FUNCTION : setFrameParameters
8573 *
8574 * DESCRIPTION: set parameters per frame as requested in the metadata from
8575 * framework
8576 *
8577 * PARAMETERS :
8578 * @request : request that needs to be serviced
8579 * @streamID : Stream ID of all the requested streams
8580 * @blob_request: Whether this request is a blob request or not
8581 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or the error code propagated from
 *              translateToHalMetadata
8584 *==========================================================================*/
8585int QCamera3HardwareInterface::setFrameParameters(
8586 camera3_capture_request_t *request,
8587 cam_stream_ID_t streamID,
8588 int blob_request,
8589 uint32_t snapshotStreamId)
8590{
8591 /*translate from camera_metadata_t type to parm_type_t*/
8592 int rc = 0;
8593 int32_t hal_version = CAM_HAL_V3;
8594
8595 clear_metadata_buffer(mParameters);
8596 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
8597 LOGE("Failed to set hal version in the parameters");
8598 return BAD_VALUE;
8599 }
8600
8601 /*we need to update the frame number in the parameters*/
8602 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
8603 request->frame_number)) {
8604 LOGE("Failed to set the frame number in the parameters");
8605 return BAD_VALUE;
8606 }
8607
8608 /* Update stream id of all the requested buffers */
8609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamID)) {
8610 LOGE("Failed to set stream type mask in the parameters");
8611 return BAD_VALUE;
8612 }
8613
8614 if (mUpdateDebugLevel) {
8615 uint32_t dummyDebugLevel = 0;
8616 /* The value of dummyDebugLevel is irrelavent. On
8617 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, read debug property */
8618 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
8619 dummyDebugLevel)) {
8620 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
8621 return BAD_VALUE;
8622 }
8623 mUpdateDebugLevel = false;
8624 }
8625
8626 if(request->settings != NULL){
8627 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
8628 if (blob_request)
8629 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
8630 }
8631
8632 return rc;
8633}
8634
8635/*===========================================================================
8636 * FUNCTION : setReprocParameters
8637 *
8638 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
8639 * return it.
8640 *
8641 * PARAMETERS :
8642 * @request : request that needs to be serviced
8643 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE, or the error code propagated from
 *              translateToHalMetadata
8646 *==========================================================================*/
8647int32_t QCamera3HardwareInterface::setReprocParameters(
8648 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
8649 uint32_t snapshotStreamId)
8650{
8651 /*translate from camera_metadata_t type to parm_type_t*/
8652 int rc = 0;
8653
8654 if (NULL == request->settings){
8655 LOGE("Reprocess settings cannot be NULL");
8656 return BAD_VALUE;
8657 }
8658
8659 if (NULL == reprocParam) {
8660 LOGE("Invalid reprocessing metadata buffer");
8661 return BAD_VALUE;
8662 }
8663 clear_metadata_buffer(reprocParam);
8664
8665 /*we need to update the frame number in the parameters*/
8666 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
8667 request->frame_number)) {
8668 LOGE("Failed to set the frame number in the parameters");
8669 return BAD_VALUE;
8670 }
8671
8672 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
8673 if (rc < 0) {
8674 LOGE("Failed to translate reproc request");
8675 return rc;
8676 }
8677
8678 CameraMetadata frame_settings;
8679 frame_settings = request->settings;
8680 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
8681 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
8682 int32_t *crop_count =
8683 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
8684 int32_t *crop_data =
8685 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
8686 int32_t *roi_map =
8687 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
8688 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
8689 cam_crop_data_t crop_meta;
8690 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
8691 crop_meta.num_of_streams = 1;
8692 crop_meta.crop_info[0].crop.left = crop_data[0];
8693 crop_meta.crop_info[0].crop.top = crop_data[1];
8694 crop_meta.crop_info[0].crop.width = crop_data[2];
8695 crop_meta.crop_info[0].crop.height = crop_data[3];
8696
8697 crop_meta.crop_info[0].roi_map.left =
8698 roi_map[0];
8699 crop_meta.crop_info[0].roi_map.top =
8700 roi_map[1];
8701 crop_meta.crop_info[0].roi_map.width =
8702 roi_map[2];
8703 crop_meta.crop_info[0].roi_map.height =
8704 roi_map[3];
8705
8706 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
8707 rc = BAD_VALUE;
8708 }
8709 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
8710 request->input_buffer->stream,
8711 crop_meta.crop_info[0].crop.left,
8712 crop_meta.crop_info[0].crop.top,
8713 crop_meta.crop_info[0].crop.width,
8714 crop_meta.crop_info[0].crop.height);
8715 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
8716 request->input_buffer->stream,
8717 crop_meta.crop_info[0].roi_map.left,
8718 crop_meta.crop_info[0].roi_map.top,
8719 crop_meta.crop_info[0].roi_map.width,
8720 crop_meta.crop_info[0].roi_map.height);
8721 } else {
8722 LOGE("Invalid reprocess crop count %d!", *crop_count);
8723 }
8724 } else {
8725 LOGE("No crop data from matching output stream");
8726 }
8727
8728 /* These settings are not needed for regular requests so handle them specially for
8729 reprocess requests; information needed for EXIF tags */
8730 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
8731 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
8732 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8733 if (NAME_NOT_FOUND != val) {
8734 uint32_t flashMode = (uint32_t)val;
8735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
8736 rc = BAD_VALUE;
8737 }
8738 } else {
8739 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
8740 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
8741 }
8742 } else {
8743 LOGH("No flash mode in reprocess settings");
8744 }
8745
8746 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
8747 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
8748 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
8749 rc = BAD_VALUE;
8750 }
8751 } else {
8752 LOGH("No flash state in reprocess settings");
8753 }
8754
8755 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
8756 uint8_t *reprocessFlags =
8757 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
8758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
8759 *reprocessFlags)) {
8760 rc = BAD_VALUE;
8761 }
8762 }
8763
8764 // Add metadata which DDM needs
8765 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB)) {
8766 cam_ddm_info_t *ddm_info =
8767 (cam_ddm_info_t *)frame_settings.find
8768 (QCAMERA3_HAL_PRIVATEDATA_DDM_DATA_BLOB).data.u8;
8769 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
8770 ddm_info->sensor_crop_info);
8771 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
8772 ddm_info->camif_crop_info);
8773 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
8774 ddm_info->isp_crop_info);
8775 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
8776 ddm_info->cpp_crop_info);
8777 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
8778 ddm_info->af_focal_length_ratio);
8779 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
8780 ddm_info->pipeline_flip);
8781 /* If there is ANDROID_JPEG_ORIENTATION in frame setting,
8782 CAM_INTF_PARM_ROTATION metadata then has been added in
8783 translateToHalMetadata. HAL need to keep this new rotation
8784 metadata. Otherwise, the old rotation info saved in the vendor tag
8785 would be used */
8786 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8787 CAM_INTF_PARM_ROTATION, reprocParam) {
8788 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
8789 } else {
8790 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
8791 ddm_info->rotation_info);
8792 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008793 }
8794
8795 /* Add additional JPEG cropping information. App add QCAMERA3_JPEG_ENCODE_CROP_RECT
8796 to ask for cropping and use ROI for downscale/upscale during HW JPEG encoding.
8797 roi.width and roi.height would be the final JPEG size.
8798 For now, HAL only checks this for reprocess request */
8799 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
8800 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
8801 uint8_t *enable =
8802 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
8803 if (*enable == TRUE) {
8804 int32_t *crop_data =
8805 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
8806 cam_stream_crop_info_t crop_meta;
8807 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
8808 crop_meta.stream_id = 0;
8809 crop_meta.crop.left = crop_data[0];
8810 crop_meta.crop.top = crop_data[1];
8811 crop_meta.crop.width = crop_data[2];
8812 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008813 // The JPEG crop roi should match cpp output size
8814 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
8815 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
8816 crop_meta.roi_map.left = 0;
8817 crop_meta.roi_map.top = 0;
8818 crop_meta.roi_map.width = cpp_crop->crop.width;
8819 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008820 }
8821 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
8822 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008823 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07008824 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008825 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
8826 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07008827 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008828 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
8829
8830 // Add JPEG scale information
8831 cam_dimension_t scale_dim;
8832 memset(&scale_dim, 0, sizeof(cam_dimension_t));
8833 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
8834 int32_t *roi =
8835 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
8836 scale_dim.width = roi[2];
8837 scale_dim.height = roi[3];
8838 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
8839 scale_dim);
8840 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
8841 scale_dim.width, scale_dim.height, mCameraId);
8842 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008843 }
8844 }
8845
8846 return rc;
8847}
8848
8849/*===========================================================================
8850 * FUNCTION : saveRequestSettings
8851 *
8852 * DESCRIPTION: Add any settings that might have changed to the request settings
8853 * and save the settings to be applied on the frame
8854 *
8855 * PARAMETERS :
8856 * @jpegMetadata : the extracted and/or modified jpeg metadata
8857 * @request : request with initial settings
8858 *
8859 * RETURN :
8860 * camera_metadata_t* : pointer to the saved request settings
8861 *==========================================================================*/
8862camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
8863 const CameraMetadata &jpegMetadata,
8864 camera3_capture_request_t *request)
8865{
8866 camera_metadata_t *resultMetadata;
8867 CameraMetadata camMetadata;
8868 camMetadata = request->settings;
8869
8870 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8871 int32_t thumbnail_size[2];
8872 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8873 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8874 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
8875 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8876 }
8877
8878 if (request->input_buffer != NULL) {
8879 uint8_t reprocessFlags = 1;
8880 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
8881 (uint8_t*)&reprocessFlags,
8882 sizeof(reprocessFlags));
8883 }
8884
8885 resultMetadata = camMetadata.release();
8886 return resultMetadata;
8887}
8888
8889/*===========================================================================
8890 * FUNCTION : setHalFpsRange
8891 *
8892 * DESCRIPTION: set FPS range parameter
8893 *
8894 *
8895 * PARAMETERS :
8896 * @settings : Metadata from framework
8897 * @hal_metadata: Metadata buffer
8898 *
8899 *
 * RETURN     : success: NO_ERROR
 *              failure: BAD_VALUE (parameter could not be added to the batch)
8902 *==========================================================================*/
int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
        metadata_buffer_t *hal_metadata)
{
    int32_t rc = NO_ERROR;
    cam_fps_range_t fps_range;
    // NOTE(review): assumes ANDROID_CONTROL_AE_TARGET_FPS_RANGE is present in
    // 'settings' -- the entry is read without an exists() check; confirm the
    // caller guarantees this.
    fps_range.min_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
    fps_range.max_fps = (float)
        settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
    // Start with video fps mirroring the AE target range; the constrained-HFR
    // branch below overrides min fps values for high-speed recording.
    fps_range.video_min_fps = fps_range.min_fps;
    fps_range.video_max_fps = fps_range.max_fps;

    LOGD("aeTargetFpsRange fps: [%f %f]",
            fps_range.min_fps, fps_range.max_fps);
    /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
     * follows:
     * ---------------------------------------------------------------|
     *      Video stream is absent in configure_streams               |
     *    (Camcorder preview before the first video record            |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     *     Video stream is present in configure_streams               |
     * ---------------------------------------------------------------|
     * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
     *                   |             |             | vid_min/max_fps|
     * ---------------------------------------------------------------|
     *        NO         |  [ 30, 240] |     240     |  [240, 240]    |
     * (camcorder prev   |-------------|-------------|----------------|
     *  after video rec  |  [240, 240] |     240     |  [240, 240]    |
     *  is stopped)      |             |             |                |
     * ---------------------------------------------------------------|
     *         YES       |  [ 30, 240] |     240     |  [240, 240]    |
     *                   |-------------|-------------|----------------|
     *                   |  [240, 240] |     240     |  [240, 240]    |
     * ---------------------------------------------------------------|
     * When Video stream is absent in configure_streams,
     * preview fps = sensor_fps / batchsize
     * Eg: for 240fps at batchSize 4, preview = 60fps
     *     for 120fps at batchSize 4, preview = 30fps
     *
     * When video stream is present in configure_streams, preview fps is as per
     * the ratio of preview buffers to video buffers requested in process
     * capture request
     */
    // Reset the batch size each call; it is recomputed below only when the
    // session is in constrained high-speed (HFR) mode at batchable fps.
    mBatchSize = 0;
    if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
        // In constrained HFR, pin both min fps values to the max so the
        // sensor runs at a fixed high rate (see table above).
        fps_range.min_fps = fps_range.video_max_fps;
        fps_range.video_min_fps = fps_range.video_max_fps;
        // Map the requested max fps onto a discrete HFR mode enum.
        int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
                fps_range.max_fps);
        if (NAME_NOT_FOUND != val) {
            cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
                return BAD_VALUE;
            }

            if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
                /* If batchmode is currently in progress and the fps changes,
                 * set the flag to restart the sensor */
                if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
                        (mHFRVideoFps != fps_range.max_fps)) {
                    mNeedSensorRestart = true;
                }
                mHFRVideoFps = fps_range.max_fps;
                // Batch enough frames to bring preview down to its target
                // fps, capped at the maximum supported batch size.
                mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
                if (mBatchSize > MAX_HFR_BATCH_SIZE) {
                    mBatchSize = MAX_HFR_BATCH_SIZE;
                }
            }
            LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);

        }
    } else {
        /* HFR mode is session param in backend/ISP. This should be reset when
         * in non-HFR mode  */
        cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
        if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
            return BAD_VALUE;
        }
    }
    // Finally publish the (possibly adjusted) fps range itself.
    if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
        return BAD_VALUE;
    }
    LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
            fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
    return rc;
}
8996
8997/*===========================================================================
8998 * FUNCTION : translateToHalMetadata
8999 *
9000 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
9001 *
9002 *
9003 * PARAMETERS :
9004 * @request : request sent from framework
9005 *
9006 *
9007 * RETURN : success: NO_ERROR
9008 * failure:
9009 *==========================================================================*/
9010int QCamera3HardwareInterface::translateToHalMetadata
9011 (const camera3_capture_request_t *request,
9012 metadata_buffer_t *hal_metadata,
9013 uint32_t snapshotStreamId)
9014{
9015 int rc = 0;
9016 CameraMetadata frame_settings;
9017 frame_settings = request->settings;
9018
9019 /* Do not change the order of the following list unless you know what you are
9020 * doing.
9021 * The order is laid out in such a way that parameters in the front of the table
9022 * may be used to override the parameters later in the table. Examples are:
9023 * 1. META_MODE should precede AEC/AWB/AF MODE
9024 * 2. AEC MODE should preced EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
9025 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
9026 * 4. Any mode should precede it's corresponding settings
9027 */
9028 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
9029 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
9030 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
9031 rc = BAD_VALUE;
9032 }
9033 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
9034 if (rc != NO_ERROR) {
9035 LOGE("extractSceneMode failed");
9036 }
9037 }
9038
9039 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9040 uint8_t fwk_aeMode =
9041 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9042 uint8_t aeMode;
9043 int32_t redeye;
9044
9045 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
9046 aeMode = CAM_AE_MODE_OFF;
9047 } else {
9048 aeMode = CAM_AE_MODE_ON;
9049 }
9050 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
9051 redeye = 1;
9052 } else {
9053 redeye = 0;
9054 }
9055
9056 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
9057 fwk_aeMode);
9058 if (NAME_NOT_FOUND != val) {
9059 int32_t flashMode = (int32_t)val;
9060 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
9061 }
9062
9063 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
9064 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
9065 rc = BAD_VALUE;
9066 }
9067 }
9068
9069 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
9070 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
9071 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9072 fwk_whiteLevel);
9073 if (NAME_NOT_FOUND != val) {
9074 uint8_t whiteLevel = (uint8_t)val;
9075 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
9076 rc = BAD_VALUE;
9077 }
9078 }
9079 }
9080
9081 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
9082 uint8_t fwk_cacMode =
9083 frame_settings.find(
9084 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
9085 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
9086 fwk_cacMode);
9087 if (NAME_NOT_FOUND != val) {
9088 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
9089 bool entryAvailable = FALSE;
9090 // Check whether Frameworks set CAC mode is supported in device or not
9091 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
9092 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
9093 entryAvailable = TRUE;
9094 break;
9095 }
9096 }
9097 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
9098 // If entry not found then set the device supported mode instead of frameworks mode i.e,
9099 // Only HW ISP CAC + NO SW CAC : Advertise all 3 with High doing same as fast by ISP
9100 // NO HW ISP CAC + Only SW CAC : Advertise all 3 with Fast doing the same as OFF
9101 if (entryAvailable == FALSE) {
9102 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
9103 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9104 } else {
9105 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
9106 // High is not supported and so set the FAST as spec say's underlying
9107 // device implementation can be the same for both modes.
9108 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
9109 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
9110 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
9111 // in order to avoid the fps drop due to high quality
9112 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9113 } else {
9114 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
9115 }
9116 }
9117 }
9118 LOGD("Final cacMode is %d", cacMode);
9119 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
9120 rc = BAD_VALUE;
9121 }
9122 } else {
9123 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
9124 }
9125 }
9126
9127 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
9128 uint8_t fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
9129 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9130 fwk_focusMode);
9131 if (NAME_NOT_FOUND != val) {
9132 uint8_t focusMode = (uint8_t)val;
9133 LOGD("set focus mode %d", focusMode);
9134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
9135 rc = BAD_VALUE;
9136 }
9137 }
9138 }
9139
9140 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE)) {
9141 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
9142 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
9143 focalDistance)) {
9144 rc = BAD_VALUE;
9145 }
9146 }
9147
9148 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
9149 uint8_t fwk_antibandingMode =
9150 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
9151 int val = lookupHalName(ANTIBANDING_MODES_MAP,
9152 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
9153 if (NAME_NOT_FOUND != val) {
9154 uint32_t hal_antibandingMode = (uint32_t)val;
9155 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
9156 hal_antibandingMode)) {
9157 rc = BAD_VALUE;
9158 }
9159 }
9160 }
9161
9162 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
9163 int32_t expCompensation = frame_settings.find(
9164 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
9165 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
9166 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
9167 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
9168 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
9169 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
9170 expCompensation)) {
9171 rc = BAD_VALUE;
9172 }
9173 }
9174
9175 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
9176 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
9177 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
9178 rc = BAD_VALUE;
9179 }
9180 }
9181 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
9182 rc = setHalFpsRange(frame_settings, hal_metadata);
9183 if (rc != NO_ERROR) {
9184 LOGE("setHalFpsRange failed");
9185 }
9186 }
9187
9188 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
9189 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
9190 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
9191 rc = BAD_VALUE;
9192 }
9193 }
9194
9195 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
9196 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
9197 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9198 fwk_effectMode);
9199 if (NAME_NOT_FOUND != val) {
9200 uint8_t effectMode = (uint8_t)val;
9201 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
9202 rc = BAD_VALUE;
9203 }
9204 }
9205 }
9206
9207 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
9208 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
9209 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
9210 colorCorrectMode)) {
9211 rc = BAD_VALUE;
9212 }
9213 }
9214
9215 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
9216 cam_color_correct_gains_t colorCorrectGains;
9217 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
9218 colorCorrectGains.gains[i] =
9219 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
9220 }
9221 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
9222 colorCorrectGains)) {
9223 rc = BAD_VALUE;
9224 }
9225 }
9226
9227 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
9228 cam_color_correct_matrix_t colorCorrectTransform;
9229 cam_rational_type_t transform_elem;
9230 size_t num = 0;
9231 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
9232 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
9233 transform_elem.numerator =
9234 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
9235 transform_elem.denominator =
9236 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
9237 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
9238 num++;
9239 }
9240 }
9241 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
9242 colorCorrectTransform)) {
9243 rc = BAD_VALUE;
9244 }
9245 }
9246
9247 cam_trigger_t aecTrigger;
9248 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
9249 aecTrigger.trigger_id = -1;
9250 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
9251 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
9252 aecTrigger.trigger =
9253 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
9254 aecTrigger.trigger_id =
9255 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
9256 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
9257 aecTrigger)) {
9258 rc = BAD_VALUE;
9259 }
9260 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
9261 aecTrigger.trigger, aecTrigger.trigger_id);
9262 }
9263
9264 /*af_trigger must come with a trigger id*/
9265 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
9266 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
9267 cam_trigger_t af_trigger;
9268 af_trigger.trigger =
9269 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
9270 af_trigger.trigger_id =
9271 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
9272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
9273 rc = BAD_VALUE;
9274 }
9275 LOGD("AfTrigger: %d AfTriggerID: %d",
9276 af_trigger.trigger, af_trigger.trigger_id);
9277 }
9278
9279 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
9280 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
9281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
9282 rc = BAD_VALUE;
9283 }
9284 }
9285 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
9286 cam_edge_application_t edge_application;
9287 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
9288 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
9289 edge_application.sharpness = 0;
9290 } else {
9291 edge_application.sharpness = gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
9292 }
9293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
9294 rc = BAD_VALUE;
9295 }
9296 }
9297
9298 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
9299 int32_t respectFlashMode = 1;
9300 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
9301 uint8_t fwk_aeMode =
9302 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
9303 if (fwk_aeMode > ANDROID_CONTROL_AE_MODE_ON) {
9304 respectFlashMode = 0;
9305 LOGH("AE Mode controls flash, ignore android.flash.mode");
9306 }
9307 }
9308 if (respectFlashMode) {
9309 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
9310 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
9311 LOGH("flash mode after mapping %d", val);
9312 // To check: CAM_INTF_META_FLASH_MODE usage
9313 if (NAME_NOT_FOUND != val) {
9314 uint8_t flashMode = (uint8_t)val;
9315 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
9316 rc = BAD_VALUE;
9317 }
9318 }
9319 }
9320 }
9321
9322 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
9323 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
9324 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
9325 rc = BAD_VALUE;
9326 }
9327 }
9328
9329 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
9330 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
9331 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
9332 flashFiringTime)) {
9333 rc = BAD_VALUE;
9334 }
9335 }
9336
9337 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
9338 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
9339 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
9340 hotPixelMode)) {
9341 rc = BAD_VALUE;
9342 }
9343 }
9344
9345 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
9346 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
9347 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
9348 lensAperture)) {
9349 rc = BAD_VALUE;
9350 }
9351 }
9352
9353 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
9354 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
9355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
9356 filterDensity)) {
9357 rc = BAD_VALUE;
9358 }
9359 }
9360
9361 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
9362 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
9363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
9364 focalLength)) {
9365 rc = BAD_VALUE;
9366 }
9367 }
9368
9369 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
9370 uint8_t optStabMode =
9371 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
9372 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
9373 optStabMode)) {
9374 rc = BAD_VALUE;
9375 }
9376 }
9377
9378 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
9379 uint8_t videoStabMode =
9380 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
9381 LOGD("videoStabMode from APP = %d", videoStabMode);
9382 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
9383 videoStabMode)) {
9384 rc = BAD_VALUE;
9385 }
9386 }
9387
9388
9389 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
9390 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
9391 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
9392 noiseRedMode)) {
9393 rc = BAD_VALUE;
9394 }
9395 }
9396
9397 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
9398 float reprocessEffectiveExposureFactor =
9399 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
9400 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
9401 reprocessEffectiveExposureFactor)) {
9402 rc = BAD_VALUE;
9403 }
9404 }
9405
9406 cam_crop_region_t scalerCropRegion;
9407 bool scalerCropSet = false;
9408 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
9409 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
9410 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
9411 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
9412 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
9413
9414 // Map coordinate system from active array to sensor output.
9415 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
9416 scalerCropRegion.width, scalerCropRegion.height);
9417
9418 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
9419 scalerCropRegion)) {
9420 rc = BAD_VALUE;
9421 }
9422 scalerCropSet = true;
9423 }
9424
9425 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
9426 int64_t sensorExpTime =
9427 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
9428 LOGD("setting sensorExpTime %lld", sensorExpTime);
9429 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
9430 sensorExpTime)) {
9431 rc = BAD_VALUE;
9432 }
9433 }
9434
9435 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
9436 int64_t sensorFrameDuration =
9437 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
9438 int64_t minFrameDuration = getMinFrameDuration(request);
9439 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
9440 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
9441 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
9442 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
9443 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
9444 sensorFrameDuration)) {
9445 rc = BAD_VALUE;
9446 }
9447 }
9448
9449 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
9450 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
9451 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
9452 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
9453 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
9454 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
9455 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
9456 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
9457 sensorSensitivity)) {
9458 rc = BAD_VALUE;
9459 }
9460 }
9461
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009462#ifndef USE_HAL_3_3
9463 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
9464 int32_t ispSensitivity =
9465 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
9466 if (ispSensitivity <
9467 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
9468 ispSensitivity =
9469 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
9470 LOGD("clamp ispSensitivity to %d", ispSensitivity);
9471 }
9472 if (ispSensitivity >
9473 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
9474 ispSensitivity =
9475 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
9476 LOGD("clamp ispSensitivity to %d", ispSensitivity);
9477 }
9478 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
9479 ispSensitivity)) {
9480 rc = BAD_VALUE;
9481 }
9482 }
9483#endif
9484
Thierry Strudel3d639192016-09-09 11:52:26 -07009485 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
9486 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
9487 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
9488 rc = BAD_VALUE;
9489 }
9490 }
9491
9492 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
9493 uint8_t fwk_facedetectMode =
9494 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
9495
9496 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
9497 fwk_facedetectMode);
9498
9499 if (NAME_NOT_FOUND != val) {
9500 uint8_t facedetectMode = (uint8_t)val;
9501 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
9502 facedetectMode)) {
9503 rc = BAD_VALUE;
9504 }
9505 }
9506 }
9507
9508 if (frame_settings.exists(ANDROID_STATISTICS_HISTOGRAM_MODE)) {
9509 uint8_t histogramMode =
9510 frame_settings.find(ANDROID_STATISTICS_HISTOGRAM_MODE).data.u8[0];
9511 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
9512 histogramMode)) {
9513 rc = BAD_VALUE;
9514 }
9515 }
9516
9517 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
9518 uint8_t sharpnessMapMode =
9519 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
9520 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
9521 sharpnessMapMode)) {
9522 rc = BAD_VALUE;
9523 }
9524 }
9525
9526 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
9527 uint8_t tonemapMode =
9528 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
9529 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
9530 rc = BAD_VALUE;
9531 }
9532 }
9533 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
9534 /*All tonemap channels will have the same number of points*/
9535 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
9536 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
9537 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
9538 cam_rgb_tonemap_curves tonemapCurves;
9539 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
9540 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
9541 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
9542 tonemapCurves.tonemap_points_cnt,
9543 CAM_MAX_TONEMAP_CURVE_SIZE);
9544 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
9545 }
9546
9547 /* ch0 = G*/
9548 size_t point = 0;
9549 cam_tonemap_curve_t tonemapCurveGreen;
9550 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9551 for (size_t j = 0; j < 2; j++) {
9552 tonemapCurveGreen.tonemap_points[i][j] =
9553 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
9554 point++;
9555 }
9556 }
9557 tonemapCurves.curves[0] = tonemapCurveGreen;
9558
9559 /* ch 1 = B */
9560 point = 0;
9561 cam_tonemap_curve_t tonemapCurveBlue;
9562 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9563 for (size_t j = 0; j < 2; j++) {
9564 tonemapCurveBlue.tonemap_points[i][j] =
9565 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
9566 point++;
9567 }
9568 }
9569 tonemapCurves.curves[1] = tonemapCurveBlue;
9570
9571 /* ch 2 = R */
9572 point = 0;
9573 cam_tonemap_curve_t tonemapCurveRed;
9574 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
9575 for (size_t j = 0; j < 2; j++) {
9576 tonemapCurveRed.tonemap_points[i][j] =
9577 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
9578 point++;
9579 }
9580 }
9581 tonemapCurves.curves[2] = tonemapCurveRed;
9582
9583 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
9584 tonemapCurves)) {
9585 rc = BAD_VALUE;
9586 }
9587 }
9588
9589 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
9590 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
9591 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
9592 captureIntent)) {
9593 rc = BAD_VALUE;
9594 }
9595 }
9596
9597 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
9598 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
9599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
9600 blackLevelLock)) {
9601 rc = BAD_VALUE;
9602 }
9603 }
9604
9605 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
9606 uint8_t lensShadingMapMode =
9607 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
9608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
9609 lensShadingMapMode)) {
9610 rc = BAD_VALUE;
9611 }
9612 }
9613
9614 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
9615 cam_area_t roi;
9616 bool reset = true;
9617 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AE_REGIONS);
9618
9619 // Map coordinate system from active array to sensor output.
9620 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9621 roi.rect.height);
9622
9623 if (scalerCropSet) {
9624 reset = resetIfNeededROI(&roi, &scalerCropRegion);
9625 }
9626 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
9627 rc = BAD_VALUE;
9628 }
9629 }
9630
9631 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
9632 cam_area_t roi;
9633 bool reset = true;
9634 convertFromRegions(roi, request->settings, ANDROID_CONTROL_AF_REGIONS);
9635
9636 // Map coordinate system from active array to sensor output.
9637 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
9638 roi.rect.height);
9639
9640 if (scalerCropSet) {
9641 reset = resetIfNeededROI(&roi, &scalerCropRegion);
9642 }
9643 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
9644 rc = BAD_VALUE;
9645 }
9646 }
9647
9648 // CDS for non-HFR non-video mode
9649 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
9650 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
9651 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
9652 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
9653 LOGE("Invalid CDS mode %d!", *fwk_cds);
9654 } else {
9655 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9656 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
9657 rc = BAD_VALUE;
9658 }
9659 }
9660 }
9661
Thierry Strudel04e026f2016-10-10 11:27:36 -07009662 // Video HDR
9663 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
9664 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
9665 frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
9666 rc = setVideoHdrMode(mParameters, vhdr);
9667 if (rc != NO_ERROR) {
9668 LOGE("setVideoHDR is failed");
9669 }
9670 }
9671
9672 //IR
9673 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
9674 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
9675 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
9676 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
9677 LOGE("Invalid IR mode %d!", fwk_ir);
9678 } else {
9679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9680 CAM_INTF_META_IR_MODE, fwk_ir)) {
9681 rc = BAD_VALUE;
9682 }
9683 }
9684 }
9685
Thierry Strudel3d639192016-09-09 11:52:26 -07009686 // TNR
9687 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
9688 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
9689 uint8_t b_TnrRequested = 0;
9690 cam_denoise_param_t tnr;
9691 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
9692 tnr.process_plates =
9693 (cam_denoise_process_type_t)frame_settings.find(
9694 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
9695 b_TnrRequested = tnr.denoise_enable;
9696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
9697 rc = BAD_VALUE;
9698 }
9699 }
9700
9701 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
9702 int32_t fwk_testPatternMode =
9703 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
9704 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
9705 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
9706
9707 if (NAME_NOT_FOUND != testPatternMode) {
9708 cam_test_pattern_data_t testPatternData;
9709 memset(&testPatternData, 0, sizeof(testPatternData));
9710 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
9711 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
9712 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
9713 int32_t *fwk_testPatternData =
9714 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
9715 testPatternData.r = fwk_testPatternData[0];
9716 testPatternData.b = fwk_testPatternData[3];
9717 switch (gCamCapability[mCameraId]->color_arrangement) {
9718 case CAM_FILTER_ARRANGEMENT_RGGB:
9719 case CAM_FILTER_ARRANGEMENT_GRBG:
9720 testPatternData.gr = fwk_testPatternData[1];
9721 testPatternData.gb = fwk_testPatternData[2];
9722 break;
9723 case CAM_FILTER_ARRANGEMENT_GBRG:
9724 case CAM_FILTER_ARRANGEMENT_BGGR:
9725 testPatternData.gr = fwk_testPatternData[2];
9726 testPatternData.gb = fwk_testPatternData[1];
9727 break;
9728 default:
9729 LOGE("color arrangement %d is not supported",
9730 gCamCapability[mCameraId]->color_arrangement);
9731 break;
9732 }
9733 }
9734 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
9735 testPatternData)) {
9736 rc = BAD_VALUE;
9737 }
9738 } else {
9739 LOGE("Invalid framework sensor test pattern mode %d",
9740 fwk_testPatternMode);
9741 }
9742 }
9743
9744 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
9745 size_t count = 0;
9746 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
9747 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
9748 gps_coords.data.d, gps_coords.count, count);
9749 if (gps_coords.count != count) {
9750 rc = BAD_VALUE;
9751 }
9752 }
9753
9754 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
9755 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
9756 size_t count = 0;
9757 const char *gps_methods_src = (const char *)
9758 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
9759 memset(gps_methods, '\0', sizeof(gps_methods));
9760 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
9761 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
9762 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
9763 if (GPS_PROCESSING_METHOD_SIZE != count) {
9764 rc = BAD_VALUE;
9765 }
9766 }
9767
9768 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
9769 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
9770 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
9771 gps_timestamp)) {
9772 rc = BAD_VALUE;
9773 }
9774 }
9775
9776 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
9777 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
9778 cam_rotation_info_t rotation_info;
9779 if (orientation == 0) {
9780 rotation_info.rotation = ROTATE_0;
9781 } else if (orientation == 90) {
9782 rotation_info.rotation = ROTATE_90;
9783 } else if (orientation == 180) {
9784 rotation_info.rotation = ROTATE_180;
9785 } else if (orientation == 270) {
9786 rotation_info.rotation = ROTATE_270;
9787 }
9788 rotation_info.streamId = snapshotStreamId;
9789 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
9790 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
9791 rc = BAD_VALUE;
9792 }
9793 }
9794
9795 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
9796 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
9797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
9798 rc = BAD_VALUE;
9799 }
9800 }
9801
9802 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
9803 uint32_t thumb_quality = (uint32_t)
9804 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
9805 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
9806 thumb_quality)) {
9807 rc = BAD_VALUE;
9808 }
9809 }
9810
9811 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
9812 cam_dimension_t dim;
9813 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
9814 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
9815 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
9816 rc = BAD_VALUE;
9817 }
9818 }
9819
9820 // Internal metadata
9821 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
9822 size_t count = 0;
9823 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
9824 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
9825 privatedata.data.i32, privatedata.count, count);
9826 if (privatedata.count != count) {
9827 rc = BAD_VALUE;
9828 }
9829 }
9830
Thierry Strudel3d639192016-09-09 11:52:26 -07009831 // EV step
9832 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
9833 gCamCapability[mCameraId]->exp_compensation_step)) {
9834 rc = BAD_VALUE;
9835 }
9836
9837 // CDS info
9838 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
9839 cam_cds_data_t *cdsData = (cam_cds_data_t *)
9840 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
9841
9842 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
9843 CAM_INTF_META_CDS_DATA, *cdsData)) {
9844 rc = BAD_VALUE;
9845 }
9846 }
9847
9848 return rc;
9849}
9850
9851/*===========================================================================
9852 * FUNCTION : captureResultCb
9853 *
9854 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
9855 *
9856 * PARAMETERS :
9857 * @frame : frame information from mm-camera-interface
9858 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
9859 * @userdata: userdata
9860 *
9861 * RETURN : NONE
9862 *==========================================================================*/
9863void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
9864 camera3_stream_buffer_t *buffer,
9865 uint32_t frame_number, bool isInputBuffer, void *userdata)
9866{
9867 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
9868 if (hw == NULL) {
9869 LOGE("Invalid hw %p", hw);
9870 return;
9871 }
9872
9873 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
9874 return;
9875}
9876
9877
9878/*===========================================================================
9879 * FUNCTION : initialize
9880 *
9881 * DESCRIPTION: Pass framework callback pointers to HAL
9882 *
9883 * PARAMETERS :
9884 *
9885 *
9886 * RETURN : Success : 0
9887 * Failure: -ENODEV
9888 *==========================================================================*/
9889
9890int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
9891 const camera3_callback_ops_t *callback_ops)
9892{
9893 LOGD("E");
9894 QCamera3HardwareInterface *hw =
9895 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9896 if (!hw) {
9897 LOGE("NULL camera device");
9898 return -ENODEV;
9899 }
9900
9901 int rc = hw->initialize(callback_ops);
9902 LOGD("X");
9903 return rc;
9904}
9905
9906/*===========================================================================
9907 * FUNCTION : configure_streams
9908 *
9909 * DESCRIPTION:
9910 *
9911 * PARAMETERS :
9912 *
9913 *
9914 * RETURN : Success: 0
9915 * Failure: -EINVAL (if stream configuration is invalid)
9916 * -ENODEV (fatal error)
9917 *==========================================================================*/
9918
9919int QCamera3HardwareInterface::configure_streams(
9920 const struct camera3_device *device,
9921 camera3_stream_configuration_t *stream_list)
9922{
9923 LOGD("E");
9924 QCamera3HardwareInterface *hw =
9925 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9926 if (!hw) {
9927 LOGE("NULL camera device");
9928 return -ENODEV;
9929 }
9930 int rc = hw->configureStreams(stream_list);
9931 LOGD("X");
9932 return rc;
9933}
9934
9935/*===========================================================================
9936 * FUNCTION : construct_default_request_settings
9937 *
9938 * DESCRIPTION: Configure a settings buffer to meet the required use case
9939 *
9940 * PARAMETERS :
9941 *
9942 *
9943 * RETURN : Success: Return valid metadata
9944 * Failure: Return NULL
9945 *==========================================================================*/
9946const camera_metadata_t* QCamera3HardwareInterface::
9947 construct_default_request_settings(const struct camera3_device *device,
9948 int type)
9949{
9950
9951 LOGD("E");
9952 camera_metadata_t* fwk_metadata = NULL;
9953 QCamera3HardwareInterface *hw =
9954 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9955 if (!hw) {
9956 LOGE("NULL camera device");
9957 return NULL;
9958 }
9959
9960 fwk_metadata = hw->translateCapabilityToMetadata(type);
9961
9962 LOGD("X");
9963 return fwk_metadata;
9964}
9965
9966/*===========================================================================
9967 * FUNCTION : process_capture_request
9968 *
9969 * DESCRIPTION:
9970 *
9971 * PARAMETERS :
9972 *
9973 *
9974 * RETURN :
9975 *==========================================================================*/
9976int QCamera3HardwareInterface::process_capture_request(
9977 const struct camera3_device *device,
9978 camera3_capture_request_t *request)
9979{
9980 LOGD("E");
9981 QCamera3HardwareInterface *hw =
9982 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
9983 if (!hw) {
9984 LOGE("NULL camera device");
9985 return -EINVAL;
9986 }
9987
9988 int rc = hw->processCaptureRequest(request);
9989 LOGD("X");
9990 return rc;
9991}
9992
9993/*===========================================================================
9994 * FUNCTION : dump
9995 *
9996 * DESCRIPTION:
9997 *
9998 * PARAMETERS :
9999 *
10000 *
10001 * RETURN :
10002 *==========================================================================*/
10003
10004void QCamera3HardwareInterface::dump(
10005 const struct camera3_device *device, int fd)
10006{
10007 /* Log level property is read when "adb shell dumpsys media.camera" is
10008 called so that the log level can be controlled without restarting
10009 the media server */
10010 getLogLevel();
10011
10012 LOGD("E");
10013 QCamera3HardwareInterface *hw =
10014 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
10015 if (!hw) {
10016 LOGE("NULL camera device");
10017 return;
10018 }
10019
10020 hw->dump(fd);
10021 LOGD("X");
10022 return;
10023}
10024
10025/*===========================================================================
10026 * FUNCTION : flush
10027 *
10028 * DESCRIPTION:
10029 *
10030 * PARAMETERS :
10031 *
10032 *
10033 * RETURN :
10034 *==========================================================================*/
10035
// Static entry point for the framework flush() call. Only performs the
// flush when the device is in STARTED state; other states are either a
// fatal error (ERROR) or a benign no-op.
int QCamera3HardwareInterface::flush(
        const struct camera3_device *device)
{
    int rc;
    LOGD("E");
    QCamera3HardwareInterface *hw =
        reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
    if (!hw) {
        LOGE("NULL camera device");
        return -EINVAL;
    }

    pthread_mutex_lock(&hw->mMutex);
    // Validate current state
    switch (hw->mState) {
        case STARTED:
            /* valid state */
            break;

        case ERROR:
            // Release the lock before invoking the error handler so it can
            // acquire mMutex itself; report a fatal device error.
            pthread_mutex_unlock(&hw->mMutex);
            hw->handleCameraDeviceError();
            return -ENODEV;

        default:
            // Flush before streaming has started is a successful no-op.
            LOGI("Flush returned during state %d", hw->mState);
            pthread_mutex_unlock(&hw->mMutex);
            return 0;
    }
    pthread_mutex_unlock(&hw->mMutex);

    // NOTE(review): mMutex is dropped before hw->flush() runs, so mState can
    // change between the check above and the flush below — presumably the
    // member flush re-validates; confirm against its implementation.
    rc = hw->flush(true /* restart channels */ );
    LOGD("X");
    return rc;
}
10071
10072/*===========================================================================
10073 * FUNCTION : close_camera_device
10074 *
10075 * DESCRIPTION:
10076 *
10077 * PARAMETERS :
10078 *
10079 *
10080 * RETURN :
10081 *==========================================================================*/
10082int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
10083{
10084 int ret = NO_ERROR;
10085 QCamera3HardwareInterface *hw =
10086 reinterpret_cast<QCamera3HardwareInterface *>(
10087 reinterpret_cast<camera3_device_t *>(device)->priv);
10088 if (!hw) {
10089 LOGE("NULL camera device");
10090 return BAD_VALUE;
10091 }
10092
10093 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
10094 delete hw;
10095 LOGI("[KPI Perf]: X");
10096 return ret;
10097}
10098
10099/*===========================================================================
10100 * FUNCTION : getWaveletDenoiseProcessPlate
10101 *
10102 * DESCRIPTION: query wavelet denoise process plate
10103 *
10104 * PARAMETERS : None
10105 *
10106 * RETURN : WNR prcocess plate value
10107 *==========================================================================*/
10108cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
10109{
10110 char prop[PROPERTY_VALUE_MAX];
10111 memset(prop, 0, sizeof(prop));
10112 property_get("persist.denoise.process.plates", prop, "0");
10113 int processPlate = atoi(prop);
10114 switch(processPlate) {
10115 case 0:
10116 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10117 case 1:
10118 return CAM_WAVELET_DENOISE_CBCR_ONLY;
10119 case 2:
10120 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10121 case 3:
10122 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10123 default:
10124 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10125 }
10126}
10127
10128
10129/*===========================================================================
10130 * FUNCTION : getTemporalDenoiseProcessPlate
10131 *
10132 * DESCRIPTION: query temporal denoise process plate
10133 *
10134 * PARAMETERS : None
10135 *
10136 * RETURN : TNR prcocess plate value
10137 *==========================================================================*/
10138cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
10139{
10140 char prop[PROPERTY_VALUE_MAX];
10141 memset(prop, 0, sizeof(prop));
10142 property_get("persist.tnr.process.plates", prop, "0");
10143 int processPlate = atoi(prop);
10144 switch(processPlate) {
10145 case 0:
10146 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
10147 case 1:
10148 return CAM_WAVELET_DENOISE_CBCR_ONLY;
10149 case 2:
10150 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10151 case 3:
10152 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
10153 default:
10154 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
10155 }
10156}
10157
10158
10159/*===========================================================================
10160 * FUNCTION : extractSceneMode
10161 *
10162 * DESCRIPTION: Extract scene mode from frameworks set metadata
10163 *
10164 * PARAMETERS :
10165 * @frame_settings: CameraMetadata reference
10166 * @metaMode: ANDROID_CONTORL_MODE
10167 * @hal_metadata: hal metadata structure
10168 *
10169 * RETURN : None
10170 *==========================================================================*/
10171int32_t QCamera3HardwareInterface::extractSceneMode(
10172 const CameraMetadata &frame_settings, uint8_t metaMode,
10173 metadata_buffer_t *hal_metadata)
10174{
10175 int32_t rc = NO_ERROR;
10176
10177 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
10178 camera_metadata_ro_entry entry =
10179 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
10180 if (0 == entry.count)
10181 return rc;
10182
10183 uint8_t fwk_sceneMode = entry.data.u8[0];
10184
10185 int val = lookupHalName(SCENE_MODES_MAP,
10186 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
10187 fwk_sceneMode);
10188 if (NAME_NOT_FOUND != val) {
10189 uint8_t sceneMode = (uint8_t)val;
10190 LOGD("sceneMode: %d", sceneMode);
10191 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10192 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10193 rc = BAD_VALUE;
10194 }
10195 }
10196 } else if ((ANDROID_CONTROL_MODE_OFF == metaMode) ||
10197 (ANDROID_CONTROL_MODE_AUTO == metaMode)) {
10198 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
10199 LOGD("sceneMode: %d", sceneMode);
10200 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
10201 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
10202 rc = BAD_VALUE;
10203 }
10204 }
10205 return rc;
10206}
10207
10208/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070010209 * FUNCTION : setVideoHdrMode
10210 *
10211 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
10212 *
10213 * PARAMETERS :
10214 * @hal_metadata: hal metadata structure
10215 * @metaMode: QCAMERA3_VIDEO_HDR_MODE
10216 *
10217 * RETURN : None
10218 *==========================================================================*/
// Translates the framework video-HDR mode into a concrete sensor-HDR type
// and batches it into hal_metadata. Returns NO_ERROR on success, BAD_VALUE
// when the mode is out of range or no supported sensor-HDR flavor matches.
int32_t QCamera3HardwareInterface::setVideoHdrMode(
        metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
{
    int32_t rc = NO_ERROR;
    // Reject out-of-range modes up front.
    if ((CAM_VIDEO_HDR_MODE_MAX <= (vhdr)) || (0 > (vhdr))) {
        LOGE("%s: Invalid Video HDR mode %d!", __func__, vhdr);
        rc = BAD_VALUE;
    } else {
        cam_sensor_hdr_type_t vhdr_type = CAM_SENSOR_HDR_MAX;
        if(vhdr == QCAMERA3_VIDEO_HDR_MODE_OFF) {
            LOGD("Setting HDR mode Off");
            vhdr_type = CAM_SENSOR_HDR_OFF;
        } else {
            // HDR requested: the concrete flavor (in-sensor / zigzag /
            // staggered) comes from persist.camera.hdr.video (default "3"),
            // gated by what the capability mask actually advertises.
            char video_hdr_prop[PROPERTY_VALUE_MAX];
            memset(video_hdr_prop, 0, sizeof(video_hdr_prop));
            property_get("persist.camera.hdr.video", video_hdr_prop, "3");
            uint8_t use_hdr_video = (uint8_t)atoi(video_hdr_prop);
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_SENSOR_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_IN_SENSOR)) {
                LOGD("Setting HDR mode In Sensor");
                vhdr_type = CAM_SENSOR_HDR_IN_SENSOR;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_ZIGZAG_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_ZIGZAG)) {
                LOGD("Setting HDR mode Zigzag");
                vhdr_type = CAM_SENSOR_HDR_ZIGZAG;
            }
            if ((gCamCapability[mCameraId]->qcom_supported_feature_mask &
                    CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) &&
                    (use_hdr_video == CAM_SENSOR_HDR_STAGGERED)) {
                LOGD("Setting HDR mode Staggered");
                vhdr_type = CAM_SENSOR_HDR_STAGGERED;
            }
            // Still CAM_SENSOR_HDR_MAX: nothing matched the property/caps.
            if(vhdr_type == CAM_SENSOR_HDR_MAX) {
                LOGD("HDR mode not supported");
                rc = BAD_VALUE;
            }
        }
        // Only batch the parameter when a valid type was resolved.
        if(rc == NO_ERROR) {
            if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
                    CAM_INTF_PARM_SENSOR_HDR, vhdr_type)) {
                rc = BAD_VALUE;
            }
        }
    }
    return rc;
}
10268
10269/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070010270 * FUNCTION : needRotationReprocess
10271 *
10272 * DESCRIPTION: if rotation needs to be done by reprocess in pp
10273 *
10274 * PARAMETERS : none
10275 *
10276 * RETURN : true: needed
10277 * false: no need
10278 *==========================================================================*/
10279bool QCamera3HardwareInterface::needRotationReprocess()
10280{
10281 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
10282 // current rotation is not zero, and pp has the capability to process rotation
10283 LOGH("need do reprocess for rotation");
10284 return true;
10285 }
10286
10287 return false;
10288}
10289
10290/*===========================================================================
10291 * FUNCTION : needReprocess
10292 *
10293 * DESCRIPTION: if reprocess in needed
10294 *
10295 * PARAMETERS : none
10296 *
10297 * RETURN : true: needed
10298 * false: no need
10299 *==========================================================================*/
10300bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
10301{
10302 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
10303 // TODO: add for ZSL HDR later
10304 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
10305 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
10306 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
10307 return true;
10308 } else {
10309 LOGH("already post processed frame");
10310 return false;
10311 }
10312 }
10313 return needRotationReprocess();
10314}
10315
10316/*===========================================================================
10317 * FUNCTION : needJpegExifRotation
10318 *
10319 * DESCRIPTION: if rotation from jpeg is needed
10320 *
10321 * PARAMETERS : none
10322 *
10323 * RETURN : true: needed
10324 * false: no need
10325 *==========================================================================*/
10326bool QCamera3HardwareInterface::needJpegExifRotation()
10327{
10328 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
10329 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
10330 LOGD("Need use Jpeg EXIF Rotation");
10331 return true;
10332 }
10333 return false;
10334}
10335
10336/*===========================================================================
10337 * FUNCTION : addOfflineReprocChannel
10338 *
10339 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
10340 * coming from input channel
10341 *
10342 * PARAMETERS :
10343 * @config : reprocess configuration
10344 * @inputChHandle : pointer to the input (source) channel
10345 *
10346 *
10347 * RETURN : Ptr to the newly created channel obj. NULL if failed.
10348 *==========================================================================*/
// Creates, initializes and configures an offline reprocess channel that
// consumes frames from inputChHandle. Returns the new channel, or NULL on
// any failure (the partially-built channel is deleted).
QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
        const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
{
    int32_t rc = NO_ERROR;
    QCamera3ReprocessChannel *pChannel = NULL;

    pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
            mChannelHandle, mCameraHandle->ops, captureResultCb, config.padding,
            CAM_QCOM_FEATURE_NONE, this, inputChHandle);
    // NOTE(review): this NULL check is only meaningful if the build uses a
    // non-throwing operator new; with a throwing new it is dead code.
    if (NULL == pChannel) {
        LOGE("no mem for reprocess channel");
        return NULL;
    }

    rc = pChannel->initialize(IS_TYPE_NONE);
    if (rc != NO_ERROR) {
        LOGE("init reprocess channel failed, ret = %d", rc);
        delete pChannel;
        return NULL;
    }

    // pp feature config
    cam_pp_feature_config_t pp_config;
    memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));

    // Start from the HAL3 superset of post-processing features.
    pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
    if (gCamCapability[mCameraId]->qcom_supported_feature_mask
            & CAM_QCOM_FEATURE_DSDN) {
        //Use CPP CDS incase h/w supports it.
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
        pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
    }
    // Drop rotation from the mask when the pp block cannot rotate.
    if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
        pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
    }

    rc = pChannel->addReprocStreamsFromSource(pp_config,
            config,
            IS_TYPE_NONE,
            mMetadataChannel);

    if (rc != NO_ERROR) {
        delete pChannel;
        return NULL;
    }
    return pChannel;
}
10396
10397/*===========================================================================
10398 * FUNCTION : getMobicatMask
10399 *
10400 * DESCRIPTION: returns mobicat mask
10401 *
10402 * PARAMETERS : none
10403 *
10404 * RETURN : mobicat mask
10405 *
10406 *==========================================================================*/
uint8_t QCamera3HardwareInterface::getMobicatMask()
{
    // Cached value populated by setMobicat() from persist.camera.mobicat.
    return m_MobicatMask;
}
10411
10412/*===========================================================================
10413 * FUNCTION : setMobicat
10414 *
10415 * DESCRIPTION: set Mobicat on/off.
10416 *
10417 * PARAMETERS :
10418 * @params : none
10419 *
10420 * RETURN : int32_t type of status
10421 * NO_ERROR -- success
10422 * none-zero failure code
10423 *==========================================================================*/
// Reads persist.camera.mobicat and, when enabled, batches chromatix-reload
// commands for both the VFE and PP modules; caches the mask for
// getMobicatMask(). Always returns NO_ERROR.
int32_t QCamera3HardwareInterface::setMobicat()
{
    char value [PROPERTY_VALUE_MAX];
    property_get("persist.camera.mobicat", value, "0");
    int32_t ret = NO_ERROR;
    uint8_t enableMobi = (uint8_t)atoi(value);

    if (enableMobi) {
        // Ask all modules to reload chromatix tuning data.
        tune_cmd_t tune_cmd;
        tune_cmd.type = SET_RELOAD_CHROMATIX;
        tune_cmd.module = MODULE_ALL;
        tune_cmd.value = TRUE;
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_PARM_SET_VFE_COMMAND,
                tune_cmd);

        // Same command goes to the post-processing pipeline.
        ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
                CAM_INTF_PARM_SET_PP_COMMAND,
                tune_cmd);
    }
    m_MobicatMask = enableMobi;

    return ret;
}
10448
10449/*===========================================================================
10450* FUNCTION : getLogLevel
10451*
10452* DESCRIPTION: Reads the log level property into a variable
10453*
10454* PARAMETERS :
10455* None
10456*
10457* RETURN :
10458* None
10459*==========================================================================*/
10460void QCamera3HardwareInterface::getLogLevel()
10461{
10462 char prop[PROPERTY_VALUE_MAX];
10463 uint32_t globalLogLevel = 0;
10464
10465 property_get("persist.camera.hal.debug", prop, "0");
10466 int val = atoi(prop);
10467 if (0 <= val) {
10468 gCamHal3LogLevel = (uint32_t)val;
10469 }
10470
10471 property_get("persist.camera.kpi.debug", prop, "1");
10472 gKpiDebugLevel = atoi(prop);
10473
10474 property_get("persist.camera.global.debug", prop, "0");
10475 val = atoi(prop);
10476 if (0 <= val) {
10477 globalLogLevel = (uint32_t)val;
10478 }
10479
10480 /* Highest log level among hal.logs and global.logs is selected */
10481 if (gCamHal3LogLevel < globalLogLevel)
10482 gCamHal3LogLevel = globalLogLevel;
10483
10484 return;
10485}
10486
10487/*===========================================================================
10488 * FUNCTION : validateStreamRotations
10489 *
10490 * DESCRIPTION: Check if the rotations requested are supported
10491 *
10492 * PARAMETERS :
10493 * @stream_list : streams to be configured
10494 *
10495 * RETURN : NO_ERROR on success
10496 * -EINVAL on failure
10497 *
10498 *==========================================================================*/
10499int QCamera3HardwareInterface::validateStreamRotations(
10500 camera3_stream_configuration_t *streamList)
10501{
10502 int rc = NO_ERROR;
10503
10504 /*
10505 * Loop through all streams requested in configuration
10506 * Check if unsupported rotations have been requested on any of them
10507 */
10508 for (size_t j = 0; j < streamList->num_streams; j++){
10509 camera3_stream_t *newStream = streamList->streams[j];
10510
10511 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
10512 bool isImplDef = (newStream->format ==
10513 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
10514 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
10515 isImplDef);
10516
10517 if (isRotated && (!isImplDef || isZsl)) {
10518 LOGE("Error: Unsupported rotation of %d requested for stream"
10519 "type:%d and stream format:%d",
10520 newStream->rotation, newStream->stream_type,
10521 newStream->format);
10522 rc = -EINVAL;
10523 break;
10524 }
10525 }
10526
10527 return rc;
10528}
10529
10530/*===========================================================================
10531* FUNCTION : getFlashInfo
10532*
10533* DESCRIPTION: Retrieve information about whether the device has a flash.
10534*
10535* PARAMETERS :
10536* @cameraId : Camera id to query
10537* @hasFlash : Boolean indicating whether there is a flash device
10538* associated with given camera
10539* @flashNode : If a flash device exists, this will be its device node.
10540*
10541* RETURN :
10542* None
10543*==========================================================================*/
10544void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
10545 bool& hasFlash,
10546 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
10547{
10548 cam_capability_t* camCapability = gCamCapability[cameraId];
10549 if (NULL == camCapability) {
10550 hasFlash = false;
10551 flashNode[0] = '\0';
10552 } else {
10553 hasFlash = camCapability->flash_available;
10554 strlcpy(flashNode,
10555 (char*)camCapability->flash_dev_name,
10556 QCAMERA_MAX_FILEPATH_LENGTH);
10557 }
10558}
10559
10560/*===========================================================================
10561* FUNCTION : getEepromVersionInfo
10562*
10563* DESCRIPTION: Retrieve version info of the sensor EEPROM data
10564*
10565* PARAMETERS : None
10566*
10567* RETURN : string describing EEPROM version
10568* "\0" if no such info available
10569*==========================================================================*/
const char *QCamera3HardwareInterface::getEepromVersionInfo()
{
    // Pointer into the static capability table; an empty string ("\0")
    // means no EEPROM version info is available.
    return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
}
10574
10575/*===========================================================================
10576* FUNCTION : getLdafCalib
10577*
10578* DESCRIPTION: Retrieve Laser AF calibration data
10579*
10580* PARAMETERS : None
10581*
10582* RETURN : Two uint32_t describing laser AF calibration data
10583* NULL if none is available.
10584*==========================================================================*/
10585const uint32_t *QCamera3HardwareInterface::getLdafCalib()
10586{
10587 if (mLdafCalibExist) {
10588 return &mLdafCalib[0];
10589 } else {
10590 return NULL;
10591 }
10592}
10593
10594/*===========================================================================
10595 * FUNCTION : dynamicUpdateMetaStreamInfo
10596 *
10597 * DESCRIPTION: This function:
10598 * (1) stops all the channels
10599 * (2) returns error on pending requests and buffers
10600 * (3) sends metastream_info in setparams
10601 * (4) starts all channels
10602 * This is useful when sensor has to be restarted to apply any
10603 * settings such as frame rate from a different sensor mode
10604 *
10605 * PARAMETERS : None
10606 *
10607 * RETURN : NO_ERROR on success
10608 * Error codes on failure
10609 *
10610 *==========================================================================*/
// Restarts the sensor pipeline to apply settings that require a sensor-mode
// change: stops all channels, errors out pending requests, re-sends the meta
// stream info, then restarts all channels. Ordering is significant.
int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
{
    ATRACE_CALL();
    int rc = NO_ERROR;

    LOGD("E");

    // Step 1: stream-off everything before touching parameters.
    rc = stopAllChannels();
    if (rc < 0) {
        LOGE("stopAllChannels failed");
        return rc;
    }

    // Step 2: fail pending requests/buffers back to the framework so no
    // results are expected across the restart.
    rc = notifyErrorForPendingRequests();
    if (rc < 0) {
        LOGE("notifyErrorForPendingRequests failed");
        return rc;
    }

    // Debug dump of the stream configuration being re-sent.
    // NOTE(review): pp_mask is printed with 0x%x — verify this matches the
    // width of cam_feature_mask_t on all targets.
    for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
                "Format:%d",
                mStreamConfigInfo.type[i],
                mStreamConfigInfo.stream_sizes[i].width,
                mStreamConfigInfo.stream_sizes[i].height,
                mStreamConfigInfo.postprocess_mask[i],
                mStreamConfigInfo.format[i]);
    }

    /* Send meta stream info once again so that ISP can start */
    ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
            CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
    rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
            mParameters);
    if (rc < 0) {
        // Non-fatal: continue and restart channels with the old sensor mode.
        LOGE("set Metastreaminfo failed. Sensor mode does not change");
    }

    // Step 3: stream-on everything again.
    rc = startAllChannels();
    if (rc < 0) {
        LOGE("startAllChannels failed");
        return rc;
    }

    LOGD("X");
    return rc;
}
10658
10659/*===========================================================================
10660 * FUNCTION : stopAllChannels
10661 *
10662 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
10663 *
10664 * PARAMETERS : None
10665 *
10666 * RETURN : NO_ERROR on success
10667 * Error codes on failure
10668 *
10669 *==========================================================================*/
10670int32_t QCamera3HardwareInterface::stopAllChannels()
10671{
10672 int32_t rc = NO_ERROR;
10673
10674 LOGD("Stopping all channels");
10675 // Stop the Streams/Channels
10676 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10677 it != mStreamInfo.end(); it++) {
10678 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10679 if (channel) {
10680 channel->stop();
10681 }
10682 (*it)->status = INVALID;
10683 }
10684
10685 if (mSupportChannel) {
10686 mSupportChannel->stop();
10687 }
10688 if (mAnalysisChannel) {
10689 mAnalysisChannel->stop();
10690 }
10691 if (mRawDumpChannel) {
10692 mRawDumpChannel->stop();
10693 }
10694 if (mMetadataChannel) {
10695 /* If content of mStreamInfo is not 0, there is metadata stream */
10696 mMetadataChannel->stop();
10697 }
10698
10699 LOGD("All channels stopped");
10700 return rc;
10701}
10702
10703/*===========================================================================
10704 * FUNCTION : startAllChannels
10705 *
10706 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
10707 *
10708 * PARAMETERS : None
10709 *
10710 * RETURN : NO_ERROR on success
10711 * Error codes on failure
10712 *
10713 *==========================================================================*/
// Stream-on for all channels. The metadata channel is started first so
// per-frame metadata is available for the other streams; any channel
// failure aborts the sequence and returns its error code.
int32_t QCamera3HardwareInterface::startAllChannels()
{
    int32_t rc = NO_ERROR;

    LOGD("Start all channels ");
    // Start the Streams/Channels
    if (mMetadataChannel) {
        /* If content of mStreamInfo is not 0, there is metadata stream */
        rc = mMetadataChannel->start();
        if (rc < 0) {
            LOGE("META channel start failed");
            return rc;
        }
    }
    // Framework stream channels.
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
        if (channel) {
            rc = channel->start();
            if (rc < 0) {
                LOGE("channel start failed");
                return rc;
            }
        }
    }
    // NOTE(review): the analysis channel's start() result is the only one
    // not checked here — confirm whether that is intentional.
    if (mAnalysisChannel) {
        mAnalysisChannel->start();
    }
    if (mSupportChannel) {
        rc = mSupportChannel->start();
        if (rc < 0) {
            LOGE("Support channel start failed");
            return rc;
        }
    }
    if (mRawDumpChannel) {
        rc = mRawDumpChannel->start();
        if (rc < 0) {
            LOGE("RAW dump channel start failed");
            return rc;
        }
    }

    LOGD("All channels started");
    return rc;
}
10760
10761/*===========================================================================
10762 * FUNCTION : notifyErrorForPendingRequests
10763 *
10764 * DESCRIPTION: This function sends error for all the pending requests/buffers
10765 *
10766 * PARAMETERS : None
10767 *
10768 * RETURN : Error codes
10769 * NO_ERROR on success
10770 *
10771 *==========================================================================*/
10772int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
10773{
10774 int32_t rc = NO_ERROR;
10775 unsigned int frameNum = 0;
10776 camera3_capture_result_t result;
10777 camera3_stream_buffer_t *pStream_Buf = NULL;
10778
10779 memset(&result, 0, sizeof(camera3_capture_result_t));
10780
10781 if (mPendingRequestsList.size() > 0) {
10782 pendingRequestIterator i = mPendingRequestsList.begin();
10783 frameNum = i->frame_number;
10784 } else {
10785 /* There might still be pending buffers even though there are
10786 no pending requests. Setting the frameNum to MAX so that
10787 all the buffers with smaller frame numbers are returned */
10788 frameNum = UINT_MAX;
10789 }
10790
10791 LOGH("Oldest frame num on mPendingRequestsList = %u",
10792 frameNum);
10793
10794 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
10795 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); ) {
10796
10797 if (req->frame_number < frameNum) {
10798 // Send Error notify to frameworks for each buffer for which
10799 // metadata buffer is already sent
10800 LOGH("Sending ERROR BUFFER for frame %d for %d buffer(s)",
10801 req->frame_number, req->mPendingBufferList.size());
10802
10803 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10804 if (NULL == pStream_Buf) {
10805 LOGE("No memory for pending buffers array");
10806 return NO_MEMORY;
10807 }
10808 memset(pStream_Buf, 0,
10809 sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10810 result.result = NULL;
10811 result.frame_number = req->frame_number;
10812 result.num_output_buffers = req->mPendingBufferList.size();
10813 result.output_buffers = pStream_Buf;
10814
10815 size_t index = 0;
10816 for (auto info = req->mPendingBufferList.begin();
10817 info != req->mPendingBufferList.end(); ) {
10818
10819 camera3_notify_msg_t notify_msg;
10820 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10821 notify_msg.type = CAMERA3_MSG_ERROR;
10822 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
10823 notify_msg.message.error.error_stream = info->stream;
10824 notify_msg.message.error.frame_number = req->frame_number;
10825 pStream_Buf[index].acquire_fence = -1;
10826 pStream_Buf[index].release_fence = -1;
10827 pStream_Buf[index].buffer = info->buffer;
10828 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10829 pStream_Buf[index].stream = info->stream;
10830 mCallbackOps->notify(mCallbackOps, &notify_msg);
10831 index++;
10832 // Remove buffer from list
10833 info = req->mPendingBufferList.erase(info);
10834 }
10835
10836 // Remove this request from Map
10837 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10838 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10839 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10840
10841 mCallbackOps->process_capture_result(mCallbackOps, &result);
10842
10843 delete [] pStream_Buf;
10844 } else {
10845
10846 // Go through the pending requests info and send error request to framework
10847 pendingRequestIterator i = mPendingRequestsList.begin(); //make sure i is at the beginning
10848
10849 LOGH("Sending ERROR REQUEST for frame %d", req->frame_number);
10850
10851 // Send error notify to frameworks
10852 camera3_notify_msg_t notify_msg;
10853 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
10854 notify_msg.type = CAMERA3_MSG_ERROR;
10855 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
10856 notify_msg.message.error.error_stream = NULL;
10857 notify_msg.message.error.frame_number = req->frame_number;
10858 mCallbackOps->notify(mCallbackOps, &notify_msg);
10859
10860 pStream_Buf = new camera3_stream_buffer_t[req->mPendingBufferList.size()];
10861 if (NULL == pStream_Buf) {
10862 LOGE("No memory for pending buffers array");
10863 return NO_MEMORY;
10864 }
10865 memset(pStream_Buf, 0, sizeof(camera3_stream_buffer_t)*req->mPendingBufferList.size());
10866
10867 result.result = NULL;
10868 result.frame_number = req->frame_number;
10869 result.input_buffer = i->input_buffer;
10870 result.num_output_buffers = req->mPendingBufferList.size();
10871 result.output_buffers = pStream_Buf;
10872
10873 size_t index = 0;
10874 for (auto info = req->mPendingBufferList.begin();
10875 info != req->mPendingBufferList.end(); ) {
10876 pStream_Buf[index].acquire_fence = -1;
10877 pStream_Buf[index].release_fence = -1;
10878 pStream_Buf[index].buffer = info->buffer;
10879 pStream_Buf[index].status = CAMERA3_BUFFER_STATUS_ERROR;
10880 pStream_Buf[index].stream = info->stream;
10881 index++;
10882 // Remove buffer from list
10883 info = req->mPendingBufferList.erase(info);
10884 }
10885
10886 // Remove this request from Map
10887 LOGD("Removing request %d. Remaining requests in mPendingBuffersMap: %d",
10888 req->frame_number, mPendingBuffersMap.mPendingBuffersInRequest.size());
10889 req = mPendingBuffersMap.mPendingBuffersInRequest.erase(req);
10890
10891 mCallbackOps->process_capture_result(mCallbackOps, &result);
10892 delete [] pStream_Buf;
10893 i = erasePendingRequest(i);
10894 }
10895 }
10896
10897 /* Reset pending frame Drop list and requests list */
10898 mPendingFrameDropList.clear();
10899
10900 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
10901 req.mPendingBufferList.clear();
10902 }
10903 mPendingBuffersMap.mPendingBuffersInRequest.clear();
10904 mPendingReprocessResultList.clear();
10905 LOGH("Cleared all the pending buffers ");
10906
10907 return rc;
10908}
10909
10910bool QCamera3HardwareInterface::isOnEncoder(
10911 const cam_dimension_t max_viewfinder_size,
10912 uint32_t width, uint32_t height)
10913{
10914 return (width > (uint32_t)max_viewfinder_size.width ||
10915 height > (uint32_t)max_viewfinder_size.height);
10916}
10917
10918/*===========================================================================
10919 * FUNCTION : setBundleInfo
10920 *
10921 * DESCRIPTION: Set bundle info for all streams that are bundle.
10922 *
10923 * PARAMETERS : None
10924 *
10925 * RETURN : NO_ERROR on success
10926 * Error codes on failure
10927 *==========================================================================*/
10928int32_t QCamera3HardwareInterface::setBundleInfo()
10929{
10930 int32_t rc = NO_ERROR;
10931
10932 if (mChannelHandle) {
10933 cam_bundle_config_t bundleInfo;
10934 memset(&bundleInfo, 0, sizeof(bundleInfo));
10935 rc = mCameraHandle->ops->get_bundle_info(
10936 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
10937 if (rc != NO_ERROR) {
10938 LOGE("get_bundle_info failed");
10939 return rc;
10940 }
10941 if (mAnalysisChannel) {
10942 mAnalysisChannel->setBundleInfo(bundleInfo);
10943 }
10944 if (mSupportChannel) {
10945 mSupportChannel->setBundleInfo(bundleInfo);
10946 }
10947 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
10948 it != mStreamInfo.end(); it++) {
10949 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
10950 channel->setBundleInfo(bundleInfo);
10951 }
10952 if (mRawDumpChannel) {
10953 mRawDumpChannel->setBundleInfo(bundleInfo);
10954 }
10955 }
10956
10957 return rc;
10958}
10959
10960/*===========================================================================
10961 * FUNCTION : get_num_overall_buffers
10962 *
10963 * DESCRIPTION: Estimate number of pending buffers across all requests.
10964 *
10965 * PARAMETERS : None
10966 *
10967 * RETURN : Number of overall pending buffers
10968 *
10969 *==========================================================================*/
10970uint32_t PendingBuffersMap::get_num_overall_buffers()
10971{
10972 uint32_t sum_buffers = 0;
10973 for (auto &req : mPendingBuffersInRequest) {
10974 sum_buffers += req.mPendingBufferList.size();
10975 }
10976 return sum_buffers;
10977}
10978
10979/*===========================================================================
10980 * FUNCTION : removeBuf
10981 *
10982 * DESCRIPTION: Remove a matching buffer from tracker.
10983 *
10984 * PARAMETERS : @buffer: image buffer for the callback
10985 *
10986 * RETURN : None
10987 *
10988 *==========================================================================*/
10989void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
10990{
10991 bool buffer_found = false;
10992 for (auto req = mPendingBuffersInRequest.begin();
10993 req != mPendingBuffersInRequest.end(); req++) {
10994 for (auto k = req->mPendingBufferList.begin();
10995 k != req->mPendingBufferList.end(); k++ ) {
10996 if (k->buffer == buffer) {
10997 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
10998 req->frame_number, buffer);
10999 k = req->mPendingBufferList.erase(k);
11000 if (req->mPendingBufferList.empty()) {
11001 // Remove this request from Map
11002 req = mPendingBuffersInRequest.erase(req);
11003 }
11004 buffer_found = true;
11005 break;
11006 }
11007 }
11008 if (buffer_found) {
11009 break;
11010 }
11011 }
11012 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
11013 get_num_overall_buffers());
11014}
11015
11016/*===========================================================================
11017 * FUNCTION : setPAAFSupport
11018 *
11019 * DESCRIPTION: Set the preview-assisted auto focus support bit in
11020 * feature mask according to stream type and filter
11021 * arrangement
11022 *
11023 * PARAMETERS : @feature_mask: current feature mask, which may be modified
11024 * @stream_type: stream type
11025 * @filter_arrangement: filter arrangement
11026 *
11027 * RETURN : None
11028 *==========================================================================*/
11029void QCamera3HardwareInterface::setPAAFSupport(
11030 cam_feature_mask_t& feature_mask,
11031 cam_stream_type_t stream_type,
11032 cam_color_filter_arrangement_t filter_arrangement)
11033{
11034 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
11035 feature_mask, stream_type, filter_arrangement);
11036
11037 switch (filter_arrangement) {
11038 case CAM_FILTER_ARRANGEMENT_RGGB:
11039 case CAM_FILTER_ARRANGEMENT_GRBG:
11040 case CAM_FILTER_ARRANGEMENT_GBRG:
11041 case CAM_FILTER_ARRANGEMENT_BGGR:
11042 if ((stream_type == CAM_STREAM_TYPE_CALLBACK) ||
11043 (stream_type == CAM_STREAM_TYPE_PREVIEW) ||
11044 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
11045 feature_mask |= CAM_QCOM_FEATURE_PAAF;
11046 }
11047 break;
11048 case CAM_FILTER_ARRANGEMENT_Y:
11049 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
11050 feature_mask |= CAM_QCOM_FEATURE_PAAF;
11051 }
11052 break;
11053 default:
11054 break;
11055 }
11056}
11057
11058/*===========================================================================
11059* FUNCTION : getSensorMountAngle
11060*
11061* DESCRIPTION: Retrieve sensor mount angle
11062*
11063* PARAMETERS : None
11064*
11065* RETURN : sensor mount angle in uint32_t
11066*==========================================================================*/
11067uint32_t QCamera3HardwareInterface::getSensorMountAngle()
11068{
11069 return gCamCapability[mCameraId]->sensor_mount_angle;
11070}
11071
11072/*===========================================================================
11073* FUNCTION : getRelatedCalibrationData
11074*
11075* DESCRIPTION: Retrieve related system calibration data
11076*
11077* PARAMETERS : None
11078*
11079* RETURN : Pointer of related system calibration data
11080*==========================================================================*/
11081const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
11082{
11083 return (const cam_related_system_calibration_data_t *)
11084 &(gCamCapability[mCameraId]->related_cam_calibration);
11085}
11086}; //end namespace qcamera