/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
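// Example (illustrative only, not part of the original source): later in this
// file the dual-camera command buffer is read back as
//     m_pDualCamCmdPtr = (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap, 0);
// i.e. DATA_PTR(MEM_OBJ, INDEX) is just shorthand for MEM_OBJ->getPtr(INDEX) on a
// heap memory object.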

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Threshold (in seconds) for detection of missing buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))
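// Example (illustrative only, not part of the original source):
//     size_t n = METADATA_MAP_SIZE(EFFECT_MODES_MAP);
// yields the number of entries in the EFFECT_MODES_MAP table defined below; the
// QCameraMap tables in this file are intended to be sized this way when traversed.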

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length*/
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.
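// Example (illustrative only, not part of the original source): the Easel
// globals above are meant to be accessed under a scoped lock on
// gHdrPlusClientLock, as done later in this file, e.g.
//     std::unique_lock<std::mutex> l(gHdrPlusClientLock);
//     if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) { ... }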


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto", CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT, CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important: while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traversal logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};
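// Sketch (illustrative only, not part of the original source): the first-match
// HAL-to-framework lookup described in the comment above amounts to
//     for (size_t i = 0; i < METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP); i++) {
//         if (REFERENCE_ILLUMINANT_MAP[i].hal_name == halValue) {
//             fwkValue = REFERENCE_ILLUMINANT_MAP[i].fwk_name;
//             break; // earlier entries win when several share the same HAL value
//         }
//     }
// The QCameraMap field names hal_name/fwk_name are assumed here for illustration.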

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};
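// Note (descriptive comment, not in the original source): this ops table is the
// entry point the camera framework calls into; the constructor below exposes it
// via mCameraDevice.ops = &mCameraOps. The NULL entries (register_stream_buffers,
// get_metadata_vendor_tag_ops) are simply not implemented by this HAL.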

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}
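
// Example (illustrative only, not part of the original source): openCamera()
// below traces Easel startup latency with
//     logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
// which logs a boot-clock timestamp only when gEaselProfilingEnabled is set.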
437
Thierry Strudel3d639192016-09-09 11:52:26 -0700438/*===========================================================================
439 * FUNCTION : QCamera3HardwareInterface
440 *
441 * DESCRIPTION: constructor of QCamera3HardwareInterface
442 *
443 * PARAMETERS :
444 * @cameraId : camera ID
445 *
446 * RETURN : none
447 *==========================================================================*/
448QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
449 const camera_module_callbacks_t *callbacks)
450 : mCameraId(cameraId),
451 mCameraHandle(NULL),
452 mCameraInitialized(false),
453 mCallbackOps(NULL),
454 mMetadataChannel(NULL),
455 mPictureChannel(NULL),
456 mRawChannel(NULL),
457 mSupportChannel(NULL),
458 mAnalysisChannel(NULL),
459 mRawDumpChannel(NULL),
Chien-Yu Chen8e599492016-11-01 13:37:46 -0700460 mHdrPlusRawSrcChannel(NULL),
Thierry Strudel3d639192016-09-09 11:52:26 -0700461 mDummyBatchChannel(NULL),
Emilian Peev7650c122017-01-19 08:24:33 -0800462 mDepthChannel(NULL),
Emilian Peev656e4fa2017-06-02 16:47:04 +0100463 mDepthCloudMode(CAM_PD_DATA_SKIP),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800464 mPerfLockMgr(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700465 mChannelHandle(0),
466 mFirstConfiguration(true),
467 mFlush(false),
468 mFlushPerf(false),
469 mParamHeap(NULL),
470 mParameters(NULL),
471 mPrevParameters(NULL),
472 m_bIsVideo(false),
473 m_bIs4KVideo(false),
474 m_bEisSupportedSize(false),
475 m_bEisEnable(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800476 m_bEis3PropertyEnabled(false),
Binhao Lin09245482017-08-31 18:25:29 -0700477 m_bAVTimerEnabled(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700478 m_MobicatMask(0),
Chien-Yu Chen3f303522017-05-19 15:21:45 -0700479 mShutterDispatcher(this),
480 mOutputBufferDispatcher(this),
Thierry Strudel3d639192016-09-09 11:52:26 -0700481 mMinProcessedFrameDuration(0),
482 mMinJpegFrameDuration(0),
483 mMinRawFrameDuration(0),
Emilian Peev30522a12017-08-03 14:36:33 +0100484 mExpectedFrameDuration(0),
485 mExpectedInflightDuration(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700486 mMetaFrameCount(0U),
487 mUpdateDebugLevel(false),
488 mCallbacks(callbacks),
489 mCaptureIntent(0),
490 mCacMode(0),
Samuel Ha68ba5172016-12-15 18:41:12 -0800491 /* DevCamDebug metadata internal m control*/
492 mDevCamDebugMetaEnable(0),
493 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -0700494 mBatchSize(0),
495 mToBeQueuedVidBufs(0),
496 mHFRVideoFps(DEFAULT_VIDEO_FPS),
497 mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800498 mStreamConfig(false),
Thierry Strudel2896d122017-02-23 19:18:03 -0800499 mCommon(),
Thierry Strudel3d639192016-09-09 11:52:26 -0700500 mFirstFrameNumberInBatch(0),
501 mNeedSensorRestart(false),
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800502 mPreviewStarted(false),
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700503 mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
504 mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
Emilian Peev0f3c3162017-03-15 12:57:46 +0000505 mPDSupported(false),
506 mPDIndex(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700507 mInstantAEC(false),
508 mResetInstantAEC(false),
509 mInstantAECSettledFrameNumber(0),
510 mAecSkipDisplayFrameBound(0),
511 mInstantAecFrameIdxCount(0),
Chien-Yu Chenbc730232017-07-12 14:49:55 -0700512 mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -0700513 mLastRequestedFaceDetectMode(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF),
Thierry Strudel54dc9782017-02-15 12:12:10 -0800514 mCurrFeatureState(0),
Thierry Strudel3d639192016-09-09 11:52:26 -0700515 mLdafCalibExist(false),
Thierry Strudel3d639192016-09-09 11:52:26 -0700516 mLastCustIntentFrmNum(-1),
Shuzhen Wang3c077d72017-04-20 22:48:59 -0700517 mFirstMetadataCallback(true),
Thierry Strudel3d639192016-09-09 11:52:26 -0700518 mState(CLOSED),
519 mIsDeviceLinked(false),
520 mIsMainCamera(true),
521 mLinkedCameraId(0),
Thierry Strudel295a0ca2016-11-03 18:38:47 -0700522 m_pDualCamCmdHeap(NULL),
Mansoor Aftab58465fa2017-01-26 15:02:44 -0800523 m_pDualCamCmdPtr(NULL),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800524 mHdrPlusModeEnabled(false),
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -0700525 mZslEnabled(false),
Chien-Yu Chen153c5172017-09-08 11:33:19 -0700526 mEaselMipiStarted(false),
Chien-Yu Chenee335912017-02-09 17:53:20 -0800527 mIsApInputUsedForHdrPlus(false),
528 mFirstPreviewIntentSeen(false),
Shuzhen Wang181c57b2017-07-21 11:39:44 -0700529 m_bSensorHDREnabled(false),
Shuzhen Wang3569d4a2017-09-04 19:10:28 -0700530 mAfTrigger(),
531 mSceneDistance(-1)
Thierry Strudel3d639192016-09-09 11:52:26 -0700532{
533 getLogLevel();
Thierry Strudel3d639192016-09-09 11:52:26 -0700534 mCommon.init(gCamCapability[cameraId]);
535 mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700536#ifndef USE_HAL_3_3
537 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
538#else
Thierry Strudel3d639192016-09-09 11:52:26 -0700539 mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -0700540#endif
Thierry Strudel3d639192016-09-09 11:52:26 -0700541 mCameraDevice.common.close = close_camera_device;
542 mCameraDevice.ops = &mCameraOps;
543 mCameraDevice.priv = this;
544 gCamCapability[cameraId]->version = CAM_HAL_V3;
545 // TODO: hardcode for now until mctl add support for min_num_pp_bufs
546 //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
547 gCamCapability[cameraId]->min_num_pp_bufs = 3;
548
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800549 PTHREAD_COND_INIT(&mBuffersCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700550
Shuzhen Wangfb961e52016-11-28 11:48:02 -0800551 PTHREAD_COND_INIT(&mRequestCond);
Thierry Strudel3d639192016-09-09 11:52:26 -0700552 mPendingLiveRequest = 0;
553 mCurrentRequestId = -1;
554 pthread_mutex_init(&mMutex, NULL);
555
556 for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
557 mDefaultMetadata[i] = NULL;
558
559 // Getting system props of different kinds
560 char prop[PROPERTY_VALUE_MAX];
561 memset(prop, 0, sizeof(prop));
562 property_get("persist.camera.raw.dump", prop, "0");
563 mEnableRawDump = atoi(prop);
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800564 property_get("persist.camera.hal3.force.hdr", prop, "0");
565 mForceHdrSnapshot = atoi(prop);
566
Thierry Strudel3d639192016-09-09 11:52:26 -0700567 if (mEnableRawDump)
568 LOGD("Raw dump from Camera HAL enabled");
569
570 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
571 memset(mLdafCalib, 0, sizeof(mLdafCalib));
572
Arnd Geis082a4d72017-08-24 10:33:07 -0700573 memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
Arnd Geis8cbfc182017-09-07 14:46:41 -0700574 mEaselFwUpdated = false;
Arnd Geis082a4d72017-08-24 10:33:07 -0700575
Thierry Strudel3d639192016-09-09 11:52:26 -0700576 memset(prop, 0, sizeof(prop));
577 property_get("persist.camera.tnr.preview", prop, "0");
578 m_bTnrPreview = (uint8_t)atoi(prop);
579
580 memset(prop, 0, sizeof(prop));
Thierry Strudele80ad7c2016-12-06 10:16:27 -0800581 property_get("persist.camera.swtnr.preview", prop, "1");
582 m_bSwTnrPreview = (uint8_t)atoi(prop);
583
584 memset(prop, 0, sizeof(prop));
Binhao Lincdb362a2017-04-20 13:31:54 -0700585 property_get("persist.camera.tnr.video", prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -0700586 m_bTnrVideo = (uint8_t)atoi(prop);
587
588 memset(prop, 0, sizeof(prop));
589 property_get("persist.camera.avtimer.debug", prop, "0");
590 m_debug_avtimer = (uint8_t)atoi(prop);
Thierry Strudelc2ee3302016-11-17 12:33:12 -0800591 LOGI("AV timer enabled: %d", m_debug_avtimer);
Thierry Strudel3d639192016-09-09 11:52:26 -0700592
Thierry Strudel54dc9782017-02-15 12:12:10 -0800593 memset(prop, 0, sizeof(prop));
594 property_get("persist.camera.cacmode.disable", prop, "0");
595 m_cacModeDisabled = (uint8_t)atoi(prop);
596
Shuzhen Wangb57ec912017-07-31 13:24:27 -0700597 m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
Shuzhen Wang8c276ef2017-08-09 11:12:20 -0700598 m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);
Shuzhen Wangb57ec912017-07-31 13:24:27 -0700599
Thierry Strudel3d639192016-09-09 11:52:26 -0700600 //Load and read GPU library.
601 lib_surface_utils = NULL;
602 LINK_get_surface_pixel_alignment = NULL;
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700603 mSurfaceStridePadding = CAM_PAD_TO_64;
604#ifdef CHECK_GPU_PIXEL_ALIGNMENT
Thierry Strudel3d639192016-09-09 11:52:26 -0700605 lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
606 if (lib_surface_utils) {
607 *(void **)&LINK_get_surface_pixel_alignment =
608 dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
609 if (LINK_get_surface_pixel_alignment) {
610 mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
611 }
612 dlclose(lib_surface_utils);
613 }
Eino-Ville Talvala0362b5a2017-05-25 15:47:16 -0700614#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +0000615 mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
616 mPDSupported = (0 <= mPDIndex) ? true : false;
617
Shuzhen Wangf6890e02016-08-12 14:28:54 -0700618 m60HzZone = is60HzZone();
Thierry Strudel3d639192016-09-09 11:52:26 -0700619}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParameters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}
1194
1195/*===========================================================================
1196 * FUNCTION : validateStreamDimensions
1197 *
1198 * DESCRIPTION: Check if the configuration requested are those advertised
1199 *
1200 * PARAMETERS :
1201 * @stream_list : streams to be configured
1202 *
1203 * RETURN :
1204 *
1205 *==========================================================================*/
1206int QCamera3HardwareInterface::validateStreamDimensions(
1207 camera3_stream_configuration_t *streamList)
1208{
1209 int rc = NO_ERROR;
1210 size_t count = 0;
Emilian Peev0f3c3162017-03-15 12:57:46 +00001211 uint32_t depthWidth = 0;
1212 uint32_t depthHeight = 0;
1213 if (mPDSupported) {
1214 depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
1215 depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
1216 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001217
1218 camera3_stream_t *inputStream = NULL;
1219 /*
1220 * Loop through all streams to find input stream if it exists*
1221 */
1222 for (size_t i = 0; i< streamList->num_streams; i++) {
1223 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1224 if (inputStream != NULL) {
1225 LOGE("Error, Multiple input streams requested");
1226 return -EINVAL;
1227 }
1228 inputStream = streamList->streams[i];
1229 }
1230 }
1231 /*
1232 * Loop through all streams requested in configuration
1233 * Check if unsupported sizes have been requested on any of them
1234 */
1235 for (size_t j = 0; j < streamList->num_streams; j++) {
1236 bool sizeFound = false;
1237 camera3_stream_t *newStream = streamList->streams[j];
1238
1239 uint32_t rotatedHeight = newStream->height;
1240 uint32_t rotatedWidth = newStream->width;
1241 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1242 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1243 rotatedHeight = newStream->width;
1244 rotatedWidth = newStream->height;
1245 }
1246
1247 /*
1248 * Sizes are different for each type of stream format check against
1249 * appropriate table.
1250 */
1251 switch (newStream->format) {
1252 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1253 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1254 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001255 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1256 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1257 mPDSupported) {
1258 if ((depthWidth == newStream->width) &&
1259 (depthHeight == newStream->height)) {
1260 sizeFound = true;
1261 }
1262 break;
1263 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001264 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1265 for (size_t i = 0; i < count; i++) {
1266 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1267 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1268 sizeFound = true;
1269 break;
1270 }
1271 }
1272 break;
1273 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001274 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1275 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001276 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001277 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001278 if ((depthSamplesCount == newStream->width) &&
1279 (1 == newStream->height)) {
1280 sizeFound = true;
1281 }
1282 break;
1283 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001284 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1285 /* Verify set size against generated sizes table */
1286 for (size_t i = 0; i < count; i++) {
1287 if (((int32_t)rotatedWidth ==
1288 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1289 ((int32_t)rotatedHeight ==
1290 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1291 sizeFound = true;
1292 break;
1293 }
1294 }
1295 break;
1296 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1297 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1298 default:
1299 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1300 || newStream->stream_type == CAMERA3_STREAM_INPUT
1301 || IS_USAGE_ZSL(newStream->usage)) {
1302 if (((int32_t)rotatedWidth ==
1303 gCamCapability[mCameraId]->active_array_size.width) &&
1304 ((int32_t)rotatedHeight ==
1305 gCamCapability[mCameraId]->active_array_size.height)) {
1306 sizeFound = true;
1307 break;
1308 }
1309 /* We could potentially break here to enforce that a ZSL stream
1310 * set by the framework is always full active array size,
1311 * but it is not clear from the spec whether the framework will
1312 * always follow that. We also have logic to override to full
1313 * array size, so keep the logic lenient for the moment.
1314 */
1315 }
1316 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1317 MAX_SIZES_CNT);
1318 for (size_t i = 0; i < count; i++) {
1319 if (((int32_t)rotatedWidth ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1321 ((int32_t)rotatedHeight ==
1322 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1323 sizeFound = true;
1324 break;
1325 }
1326 }
1327 break;
1328 } /* End of switch(newStream->format) */
1329
1330 /* We error out even if a single stream has an unsupported size set */
1331 if (!sizeFound) {
1332 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1333 rotatedWidth, rotatedHeight, newStream->format,
1334 gCamCapability[mCameraId]->active_array_size.width,
1335 gCamCapability[mCameraId]->active_array_size.height);
1336 rc = -EINVAL;
1337 break;
1338 }
1339 } /* End of for each stream */
1340 return rc;
1341}
1342
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001343/*===========================================================================
1344 * FUNCTION : validateUsageFlags
1345 *
1346 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1347 *
1348 * PARAMETERS :
1349 * @stream_list : streams to be configured
1350 *
1351 * RETURN :
1352 * NO_ERROR if the usage flags are supported
1353 * error code if usage flags are not supported
1354 *
1355 *==========================================================================*/
1356int QCamera3HardwareInterface::validateUsageFlags(
1357 const camera3_stream_configuration_t* streamList)
1358{
1359 for (size_t j = 0; j < streamList->num_streams; j++) {
1360 const camera3_stream_t *newStream = streamList->streams[j];
1361
1362 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365 continue;
1366 }
1367
Jason Leec4cf5032017-05-24 18:31:41 -07001368 // Here we only care whether it's EIS3 or not
1369 char is_type_value[PROPERTY_VALUE_MAX];
1370 property_get("persist.camera.is_type", is_type_value, "4");
1371 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374 isType = IS_TYPE_NONE;
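        // isType only selects which default formats getStreamDefaultFormat()
        // returns below; front cameras and constrained high-speed sessions
        // never use EIS 3.0.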
1375
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379 bool forcePreviewUBWC = true;
1380 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381 forcePreviewUBWC = false;
1382 }
1383 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001388 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001389
1390 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391 // So color spaces will always match.
1392
1393 // Check whether underlying formats of shared streams match.
1394 if (isVideo && isPreview && videoFormat != previewFormat) {
1395 LOGE("Combined video and preview usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isPreview && isZSL && previewFormat != zslFormat) {
1399 LOGE("Combined preview and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 if (isVideo && isZSL && videoFormat != zslFormat) {
1403 LOGE("Combined video and zsl usage flag is not supported");
1404 return -EINVAL;
1405 }
1406 }
1407 return NO_ERROR;
1408}
1409
1410/*===========================================================================
1411 * FUNCTION : validateUsageFlagsForEis
1412 *
1413 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1414 *
1415 * PARAMETERS :
1416 * @stream_list : streams to be configured
1417 *
1418 * RETURN :
1419 * NO_ERROR if the usage flags are supported
1420 * error code if usage flags are not supported
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424 const camera3_stream_configuration_t* streamList)
1425{
1426 for (size_t j = 0; j < streamList->num_streams; j++) {
1427 const camera3_stream_t *newStream = streamList->streams[j];
1428
1429 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431
1432 // Because EIS is "hard-coded" for certain use cases, and the current
1433 // implementation doesn't support shared preview and video on the same
1434 // stream, return failure if EIS is forced on.
1435 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436 LOGE("Combined video and preview usage flag is not supported due to EIS");
1437 return -EINVAL;
1438 }
1439 }
1440 return NO_ERROR;
1441}
1442
Thierry Strudel3d639192016-09-09 11:52:26 -07001443/*==============================================================================
1444 * FUNCTION : isSupportChannelNeeded
1445 *
1446 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1447 *
1448 * PARAMETERS :
1449 * @stream_list : streams to be configured
1450 * @stream_config_info : the config info for streams to be configured
1451 *
1452 * RETURN : Boolean true/false decision
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456 camera3_stream_configuration_t *streamList,
1457 cam_stream_size_info_t stream_config_info)
1458{
1459 uint32_t i;
1460 bool pprocRequested = false;
1461 /* Check for conditions where the PProc pipeline does not have any streams */
1462 for (i = 0; i < stream_config_info.num_streams; i++) {
1463 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465 pprocRequested = true;
1466 break;
1467 }
1468 }
1469
1470 if (pprocRequested == false )
1471 return true;
1472
1473 /* Dummy stream needed if only raw or jpeg streams present */
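    // Example: a configuration with only RAW16 and BLOB (JPEG) streams falls
    // through this loop and returns true, so a dummy support channel will be
    // created for the processing pipeline.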
1474 for (i = 0; i < streamList->num_streams; i++) {
1475 switch(streamList->streams[i]->format) {
1476 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477 case HAL_PIXEL_FORMAT_RAW10:
1478 case HAL_PIXEL_FORMAT_RAW16:
1479 case HAL_PIXEL_FORMAT_BLOB:
1480 break;
1481 default:
1482 return false;
1483 }
1484 }
1485 return true;
1486}
1487
1488/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001494 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
1498 * non-zero failure code
1499 *
1500 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001502{
1503 int32_t rc = NO_ERROR;
1504
1505 cam_dimension_t max_dim = {0, 0};
1506 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1507 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1508 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1509 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1510 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1511 }
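    // max_dim is the per-axis maximum across all configured streams; it is
    // sent as CAM_INTF_PARM_MAX_DIMENSION so the backend can select a sensor
    // mode large enough to cover every stream.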
1512
1513 clear_metadata_buffer(mParameters);
1514
1515 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1516 max_dim);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1523 if (rc != NO_ERROR) {
1524 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1525 return rc;
1526 }
1527
1528 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001529 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001530
1531 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1532 mParameters);
1533 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 return rc;
1536 }
1537
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001538 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001539 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1540 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1541 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1542 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1543 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001544
1545 return rc;
1546}
1547
1548/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001549 * FUNCTION : getCurrentSensorModeInfo
1550 *
1551 * DESCRIPTION: Get sensor mode information that is currently selected.
1552 *
1553 * PARAMETERS :
1554 * @sensorModeInfo : sensor mode information (output)
1555 *
1556 * RETURN : int32_t type of status
1557 * NO_ERROR -- success
1558 * non-zero failure code
1559 *
1560 *==========================================================================*/
1561int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562{
1563 int32_t rc = NO_ERROR;
1564
1565 clear_metadata_buffer(mParameters);
1566 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567
1568 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569 mParameters);
1570 if (rc != NO_ERROR) {
1571 LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1572 return rc;
1573 }
1574
1575 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580 sensorModeInfo.num_raw_bits);
1581
1582 return rc;
1583}
1584
1585/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001586 * FUNCTION : addToPPFeatureMask
1587 *
1588 * DESCRIPTION: add additional features to pp feature mask based on
1589 * stream type and usecase
1590 *
1591 * PARAMETERS :
1592 * @stream_format : stream type for feature mask
1593 * @stream_idx : stream idx within postprocess_mask list to change
1594 *
1595 * RETURN : NULL
1596 *
1597 *==========================================================================*/
1598void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599 uint32_t stream_idx)
1600{
1601 char feature_mask_value[PROPERTY_VALUE_MAX];
1602 cam_feature_mask_t feature_mask;
1603 int args_converted;
1604 int property_len;
1605
1606 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#ifdef _LE_CAMERA_
1608 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, swtnr_feature_mask_value);
1612#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 property_len = property_get("persist.camera.hal3.feature",
1614 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001615#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617 (feature_mask_value[1] == 'x')) {
1618 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619 } else {
1620 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621 }
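    // persist.camera.hal3.feature accepts either a hex ("0x"-prefixed) or a
    // decimal value; a value that fails to parse is rejected just below and
    // the feature mask is treated as 0.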
1622 if (1 != args_converted) {
1623 feature_mask = 0;
1624 LOGE("Wrong feature mask %s", feature_mask_value);
1625 return;
1626 }
1627
1628 switch (stream_format) {
1629 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630 /* Add LLVD to pp feature mask only if video hint is enabled */
1631 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QTI_FEATURE_SW_TNR;
1634 LOGH("Added SW TNR to pp feature mask");
1635 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx]
1637 |= CAM_QCOM_FEATURE_LLVD;
1638 LOGH("Added LLVD SeeMore to pp feature mask");
1639 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001644 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647 CAM_QTI_FEATURE_BINNING_CORRECTION;
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 break;
1650 }
1651 default:
1652 break;
1653 }
1654 LOGD("PP feature mask %llx",
1655 mStreamConfigInfo.postprocess_mask[stream_idx]);
1656}
1657
1658/*==============================================================================
1659 * FUNCTION : updateFpsInPreviewBuffer
1660 *
1661 * DESCRIPTION: update FPS information in preview buffer.
1662 *
1663 * PARAMETERS :
1664 * @metadata : pointer to metadata buffer
1665 * @frame_number: frame_number to look for in pending buffer list
1666 *
1667 * RETURN : None
1668 *
1669 *==========================================================================*/
1670void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671 uint32_t frame_number)
1672{
1673 // Mark all pending buffers for this particular request
1674 // with corresponding framerate information
1675 for (List<PendingBuffersInRequest>::iterator req =
1676 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678 for(List<PendingBufferInfo>::iterator j =
1679 req->mPendingBufferList.begin();
1680 j != req->mPendingBufferList.end(); j++) {
1681 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682 if ((req->frame_number == frame_number) &&
1683 (channel->getStreamTypeMask() &
1684 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686 CAM_INTF_PARM_FPS_RANGE, metadata) {
1687 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688 struct private_handle_t *priv_handle =
1689 (struct private_handle_t *)(*(j->buffer));
1690 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691 }
1692 }
1693 }
1694 }
1695}
1696
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001697/*==============================================================================
1698 * FUNCTION : updateTimeStampInPendingBuffers
1699 *
1700 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701 * of a frame number
1702 *
1703 * PARAMETERS :
1704 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705 * @timestamp : timestamp to be set
1706 *
1707 * RETURN : None
1708 *
1709 *==========================================================================*/
1710void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711 uint32_t frameNumber, nsecs_t timestamp)
1712{
1713 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001715 // WAR: save the av_timestamp to the next frame
1716 if(req->frame_number == frameNumber + 1) {
1717 req->av_timestamp = timestamp;
1718 }
1719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001720 if (req->frame_number != frameNumber)
1721 continue;
1722
1723 for (auto k = req->mPendingBufferList.begin();
1724 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001725 // WAR: update timestamp when it's not VT usecase
1726 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728 m_bAVTimerEnabled)) {
1729 struct private_handle_t *priv_handle =
1730 (struct private_handle_t *) (*(k->buffer));
1731 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001733 }
1734 }
1735 return;
1736}
1737
Thierry Strudel3d639192016-09-09 11:52:26 -07001738/*===========================================================================
1739 * FUNCTION : configureStreams
1740 *
1741 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742 * and output streams.
1743 *
1744 * PARAMETERS :
1745 * @stream_list : streams to be configured
1746 *
1747 * RETURN :
1748 *
1749 *==========================================================================*/
1750int QCamera3HardwareInterface::configureStreams(
1751 camera3_stream_configuration_t *streamList)
1752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001754 int rc = 0;
1755
1756 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001759 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760
1761 return rc;
1762}
1763
1764/*===========================================================================
1765 * FUNCTION : configureStreamsPerfLocked
1766 *
1767 * DESCRIPTION: configureStreams while perfLock is held.
1768 *
1769 * PARAMETERS :
1770 * @stream_list : streams to be configured
1771 *
1772 * RETURN : int32_t type of status
1773 * NO_ERROR -- success
1774 * non-zero failure code
1775 *==========================================================================*/
1776int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777 camera3_stream_configuration_t *streamList)
1778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001780 int rc = 0;
1781
1782 // Sanity check stream_list
1783 if (streamList == NULL) {
1784 LOGE("NULL stream configuration");
1785 return BAD_VALUE;
1786 }
1787 if (streamList->streams == NULL) {
1788 LOGE("NULL stream list");
1789 return BAD_VALUE;
1790 }
1791
1792 if (streamList->num_streams < 1) {
1793 LOGE("Bad number of streams requested: %d",
1794 streamList->num_streams);
1795 return BAD_VALUE;
1796 }
1797
1798 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799 LOGE("Maximum number of streams %d exceeded: %d",
1800 MAX_NUM_STREAMS, streamList->num_streams);
1801 return BAD_VALUE;
1802 }
1803
Jason Leec4cf5032017-05-24 18:31:41 -07001804 mOpMode = streamList->operation_mode;
1805 LOGD("mOpMode: %d", mOpMode);
1806
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001807 rc = validateUsageFlags(streamList);
1808 if (rc != NO_ERROR) {
1809 return rc;
1810 }
1811
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07001812 // Disable HDR+ if it's enabled.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001813 {
1814 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815 finishHdrPlusClientOpeningLocked(l);
1816 disableHdrPlusModeLocked();
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819 /* first invalidate all the streams in mStreamInfo
1820 * if they appear again, they will be validated */
1821 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822 it != mStreamInfo.end(); it++) {
1823 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824 if (channel) {
1825 channel->stop();
1826 }
1827 (*it)->status = INVALID;
1828 }
1829
1830 if (mRawDumpChannel) {
1831 mRawDumpChannel->stop();
1832 delete mRawDumpChannel;
1833 mRawDumpChannel = NULL;
1834 }
1835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001836 if (mHdrPlusRawSrcChannel) {
1837 mHdrPlusRawSrcChannel->stop();
1838 delete mHdrPlusRawSrcChannel;
1839 mHdrPlusRawSrcChannel = NULL;
1840 }
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 if (mSupportChannel)
1843 mSupportChannel->stop();
1844
1845 if (mAnalysisChannel) {
1846 mAnalysisChannel->stop();
1847 }
1848 if (mMetadataChannel) {
1849 /* If mStreamInfo is not empty, there is a metadata stream */
1850 mMetadataChannel->stop();
1851 }
1852 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001853 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 }
1855
1856 pthread_mutex_lock(&mMutex);
1857
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001858 mPictureChannel = NULL;
1859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 // Check state
1861 switch (mState) {
1862 case INITIALIZED:
1863 case CONFIGURED:
1864 case STARTED:
1865 /* valid state */
1866 break;
1867 default:
1868 LOGE("Invalid state %d", mState);
1869 pthread_mutex_unlock(&mMutex);
1870 return -ENODEV;
1871 }
1872
1873 /* Check whether we have video stream */
1874 m_bIs4KVideo = false;
1875 m_bIsVideo = false;
1876 m_bEisSupportedSize = false;
1877 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001878 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001880 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001881 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001882 uint32_t videoWidth = 0U;
1883 uint32_t videoHeight = 0U;
1884 size_t rawStreamCnt = 0;
1885 size_t stallStreamCnt = 0;
1886 size_t processedStreamCnt = 0;
1887 // Number of streams on ISP encoder path
1888 size_t numStreamsOnEncoder = 0;
1889 size_t numYuv888OnEncoder = 0;
1890 bool bYuv888OverrideJpeg = false;
1891 cam_dimension_t largeYuv888Size = {0, 0};
1892 cam_dimension_t maxViewfinderSize = {0, 0};
1893 bool bJpegExceeds4K = false;
1894 bool bJpegOnEncoder = false;
1895 bool bUseCommonFeatureMask = false;
1896 cam_feature_mask_t commonFeatureMask = 0;
1897 bool bSmallJpegSize = false;
1898 uint32_t width_ratio;
1899 uint32_t height_ratio;
1900 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901 camera3_stream_t *inputStream = NULL;
1902 bool isJpeg = false;
1903 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001904 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001905 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906
1907 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908
1909 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 uint8_t eis_prop_set;
1911 uint32_t maxEisWidth = 0;
1912 uint32_t maxEisHeight = 0;
1913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001914 // Initialize all instant AEC related variables
1915 mInstantAEC = false;
1916 mResetInstantAEC = false;
1917 mInstantAECSettledFrameNumber = 0;
1918 mAecSkipDisplayFrameBound = 0;
1919 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001920 mCurrFeatureState = 0;
1921 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001922
Binhao Lin09245482017-08-31 18:25:29 -07001923 m_bAVTimerEnabled = false;
1924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926
1927 size_t count = IS_TYPE_MAX;
1928 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929 for (size_t i = 0; i < count; i++) {
1930 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 }
1935 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001937 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 maxEisWidth = MAX_EIS_WIDTH;
1939 maxEisHeight = MAX_EIS_HEIGHT;
1940 }
1941
1942 /* EIS setprop control */
1943 char eis_prop[PROPERTY_VALUE_MAX];
1944 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001945 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 eis_prop_set = (uint8_t)atoi(eis_prop);
1947
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001948 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
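    // EIS is enabled only when the property allows it, the sensor advertises
    // EIS 2.0/3.0 support, and this is not a constrained high-speed session;
    // it may still be cleared later for front cameras or non-video configs.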
1950
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001951 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001953
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 /* stream configurations */
1955 for (size_t i = 0; i < streamList->num_streams; i++) {
1956 camera3_stream_t *newStream = streamList->streams[i];
1957 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958 "height = %d, rotation = %d, usage = 0x%x",
1959 i, newStream->stream_type, newStream->format,
1960 newStream->width, newStream->height, newStream->rotation,
1961 newStream->usage);
1962 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964 isZsl = true;
1965 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967 IS_USAGE_PREVIEW(newStream->usage)) {
1968 isPreview = true;
1969 }
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972 inputStream = newStream;
1973 }
1974
Emilian Peev7650c122017-01-19 08:24:33 -08001975 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001977 isJpeg = true;
1978 jpegSize.width = newStream->width;
1979 jpegSize.height = newStream->height;
1980 if (newStream->width > VIDEO_4K_WIDTH ||
1981 newStream->height > VIDEO_4K_HEIGHT)
1982 bJpegExceeds4K = true;
1983 }
1984
1985 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001988 // In HAL3 we can have multiple different video streams.
1989 // The videoWidth and videoHeight variables are used below as the
1990 // dimensions of the largest of them.
1991 if (videoWidth < newStream->width ||
1992 videoHeight < newStream->height) {
1993 videoWidth = newStream->width;
1994 videoHeight = newStream->height;
1995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997 (VIDEO_4K_HEIGHT <= newStream->height)) {
1998 m_bIs4KVideo = true;
1999 }
2000 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 }
2004 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006 switch (newStream->format) {
2007 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009 depthPresent = true;
2010 break;
2011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 stallStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 numStreamsOnEncoder++;
2016 bJpegOnEncoder = true;
2017 }
2018 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019 newStream->width);
2020 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2021 newStream->height);
2022 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023 "FATAL: max_downscale_factor cannot be zero and so assert");
2024 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026 LOGH("Setting small jpeg size flag to true");
2027 bSmallJpegSize = true;
2028 }
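                    // A JPEG smaller than active_array / max_downscale_factor exceeds what
                    // ISP downscaling alone can produce; bSmallJpegSize routes such streams
                    // through the PP superset feature mask when channels are set up below.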
2029 break;
2030 case HAL_PIXEL_FORMAT_RAW10:
2031 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032 case HAL_PIXEL_FORMAT_RAW16:
2033 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002034 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036 pdStatCount++;
2037 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002038 break;
2039 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040 processedStreamCnt++;
2041 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042 newStream->height)) {
2043 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044 !IS_USAGE_ZSL(newStream->usage)) {
2045 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046 }
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051 processedStreamCnt++;
2052 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053 newStream->height)) {
2054 // If Yuv888 size is not greater than 4K, set feature mask
2055 // to SUPERSET so that it supports concurrent requests on
2056 // YUV and JPEG.
2057 if (newStream->width <= VIDEO_4K_WIDTH &&
2058 newStream->height <= VIDEO_4K_HEIGHT) {
2059 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060 }
2061 numStreamsOnEncoder++;
2062 numYuv888OnEncoder++;
2063 largeYuv888Size.width = newStream->width;
2064 largeYuv888Size.height = newStream->height;
2065 }
2066 break;
2067 default:
2068 processedStreamCnt++;
2069 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070 newStream->height)) {
2071 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 numStreamsOnEncoder++;
2073 }
2074 break;
2075 }
2076
2077 }
2078 }
2079
2080 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082 !m_bIsVideo) {
2083 m_bEisEnable = false;
2084 }
2085
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002086 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087 pthread_mutex_unlock(&mMutex);
2088 return -EINVAL;
2089 }
2090
Thierry Strudel54dc9782017-02-15 12:12:10 -08002091 uint8_t forceEnableTnr = 0;
2092 char tnr_prop[PROPERTY_VALUE_MAX];
2093 memset(tnr_prop, 0, sizeof(tnr_prop));
2094 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 /* Logic to enable/disable TNR based on specific config size/etc.*/
2098 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002101 else if (forceEnableTnr)
2102 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002103
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002104 char videoHdrProp[PROPERTY_VALUE_MAX];
2105 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108
2109 if (hdr_mode_prop == 1 && m_bIsVideo &&
2110 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111 m_bVideoHdrEnabled = true;
2112 else
2113 m_bVideoHdrEnabled = false;
2114
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 /* Check if num_streams is sane */
2117 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118 rawStreamCnt > MAX_RAW_STREAMS ||
2119 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2120 LOGE("Invalid stream config: stall: %d, raw: %d, processed %d",
2121 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002126 if (isZsl && m_bIs4KVideo) {
2127 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131 /* Check if stream sizes are sane */
2132 if (numStreamsOnEncoder > 2) {
2133 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134 pthread_mutex_unlock(&mMutex);
2135 return -EINVAL;
2136 } else if (1 < numStreamsOnEncoder){
2137 bUseCommonFeatureMask = true;
2138 LOGH("Multiple streams above max viewfinder size, common mask needed");
2139 }
2140
2141 /* Check if BLOB size is greater than 4k in 4k recording case */
2142 if (m_bIs4KVideo && bJpegExceeds4K) {
2143 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Emilian Peev7650c122017-01-19 08:24:33 -08002148 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149 depthPresent) {
2150 LOGE("HAL doesn't support depth streams in HFR mode!");
2151 pthread_mutex_unlock(&mMutex);
2152 return -EINVAL;
2153 }
2154
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2158 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2159 // configurations:
2160 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2163 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165 __func__);
2166 pthread_mutex_unlock(&mMutex);
2167 return -EINVAL;
2168 }
2169
2170 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171 // the YUV stream's size is greater or equal to the JPEG size, set common
2172 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174 jpegSize.width, jpegSize.height) &&
2175 largeYuv888Size.width > jpegSize.width &&
2176 largeYuv888Size.height > jpegSize.height) {
2177 bYuv888OverrideJpeg = true;
2178 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180 }
2181
2182 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184 commonFeatureMask);
2185 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187
2188 rc = validateStreamDimensions(streamList);
2189 if (rc == NO_ERROR) {
2190 rc = validateStreamRotations(streamList);
2191 }
2192 if (rc != NO_ERROR) {
2193 LOGE("Invalid stream configuration requested!");
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Emilian Peev0f3c3162017-03-15 12:57:46 +00002198 if (1 < pdStatCount) {
2199 LOGE("HAL doesn't support multiple PD streams");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
2204 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205 (1 == pdStatCount)) {
2206 LOGE("HAL doesn't support PD streams in HFR mode!");
2207 pthread_mutex_unlock(&mMutex);
2208 return -EINVAL;
2209 }
2210
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212 for (size_t i = 0; i < streamList->num_streams; i++) {
2213 camera3_stream_t *newStream = streamList->streams[i];
2214 LOGH("newStream type = %d, stream format = %d "
2215 "stream size : %d x %d, stream rotation = %d",
2216 newStream->stream_type, newStream->format,
2217 newStream->width, newStream->height, newStream->rotation);
2218 //if the stream is already in mStreamInfo, validate it
2219 bool stream_exists = false;
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 QCamera3ProcessingChannel *channel =
2224 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225 stream_exists = true;
2226 if (channel)
2227 delete channel;
2228 (*it)->status = VALID;
2229 (*it)->stream->priv = NULL;
2230 (*it)->channel = NULL;
2231 }
2232 }
2233 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234 //new stream
2235 stream_info_t* stream_info;
2236 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237 if (!stream_info) {
2238 LOGE("Could not allocate stream info");
2239 rc = -ENOMEM;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243 stream_info->stream = newStream;
2244 stream_info->status = VALID;
2245 stream_info->channel = NULL;
Chien-Yu Chen3d836272017-09-20 11:10:21 -07002246 stream_info->id = i; // ID will be re-assigned in cleanAndSortStreamInfo().
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamInfo.push_back(stream_info);
2248 }
2249 /* Covers Opaque ZSL and API1 F/W ZSL */
2250 if (IS_USAGE_ZSL(newStream->usage)
2251 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252 if (zslStream != NULL) {
2253 LOGE("Multiple input/reprocess streams requested!");
2254 pthread_mutex_unlock(&mMutex);
2255 return BAD_VALUE;
2256 }
2257 zslStream = newStream;
2258 }
2259 /* Covers YUV reprocess */
2260 if (inputStream != NULL) {
2261 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264 && inputStream->width == newStream->width
2265 && inputStream->height == newStream->height) {
2266 if (zslStream != NULL) {
2267 /* This scenario indicates multiple YUV streams with the same size
2268 * as the input stream have been requested. Since the zsl stream handle
2269 * is solely used to override the size of streams that share h/w
2270 * streams, we just make a guess here as to which of the streams is
2271 * the ZSL stream; this will be refactored once we have generic logic
2272 * for streams sharing encoder output
2273 */
2274 LOGH("Warning, Multiple ip/reprocess streams requested!");
2275 }
2276 zslStream = newStream;
2277 }
2278 }
2279 }
2280
2281 /* If a zsl stream is set, we know that we have configured at least one input or
2282 bidirectional stream */
2283 if (NULL != zslStream) {
2284 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286 mInputStreamInfo.format = zslStream->format;
2287 mInputStreamInfo.usage = zslStream->usage;
2288 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289 mInputStreamInfo.dim.width,
2290 mInputStreamInfo.dim.height,
2291 mInputStreamInfo.format, mInputStreamInfo.usage);
2292 }
2293
2294 cleanAndSortStreamInfo();
2295 if (mMetadataChannel) {
2296 delete mMetadataChannel;
2297 mMetadataChannel = NULL;
2298 }
2299 if (mSupportChannel) {
2300 delete mSupportChannel;
2301 mSupportChannel = NULL;
2302 }
2303
2304 if (mAnalysisChannel) {
2305 delete mAnalysisChannel;
2306 mAnalysisChannel = NULL;
2307 }
2308
2309 if (mDummyBatchChannel) {
2310 delete mDummyBatchChannel;
2311 mDummyBatchChannel = NULL;
2312 }
2313
Emilian Peev7650c122017-01-19 08:24:33 -08002314 if (mDepthChannel) {
2315 mDepthChannel = NULL;
2316 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002317 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002319 mShutterDispatcher.clear();
2320 mOutputBufferDispatcher.clear();
2321
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 char is_type_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.is_type", is_type_value, "4");
2324 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325
Binhao Line406f062017-05-03 14:39:44 -07002326 char property_value[PROPERTY_VALUE_MAX];
2327 property_get("persist.camera.gzoom.at", property_value, "0");
2328 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002329 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002333
2334 property_get("persist.camera.gzoom.4k", property_value, "0");
2335 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
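    // Google zoom is applied per stream type below via CAM_QCOM_FEATURE_GOOG_ZOOM;
    // on the video path it is additionally gated by persist.camera.gzoom.4k when
    // the session contains a 4K video stream.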
2336
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 //Create metadata channel and initialize it
2338 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002343 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 if (mMetadataChannel == NULL) {
2345 LOGE("failed to allocate metadata channel");
2346 rc = -ENOMEM;
2347 pthread_mutex_unlock(&mMutex);
2348 return rc;
2349 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002350 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352 if (rc < 0) {
2353 LOGE("metadata channel initialization failed");
2354 delete mMetadataChannel;
2355 mMetadataChannel = NULL;
2356 pthread_mutex_unlock(&mMutex);
2357 return rc;
2358 }
2359
Thierry Strudel2896d122017-02-23 19:18:03 -08002360 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002362 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002363 // Keep track of preview/video streams indices.
2364 // There could be more than one preview streams, but only one video stream.
2365 int32_t video_stream_idx = -1;
2366 int32_t preview_stream_idx[streamList->num_streams];
2367 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002368 bool previewTnr[streamList->num_streams];
2369 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371 // Loop through once to determine preview TNR conditions before creating channels.
2372 for (size_t i = 0; i < streamList->num_streams; i++) {
2373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378 video_stream_idx = (int32_t)i;
2379 else
2380 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381 }
2382 }
2383 // By default, preview stream TNR is disabled.
2384 // Enable TNR to the preview stream if all conditions below are satisfied:
2385 // 1. preview resolution == video resolution.
2386 // 2. video stream TNR is enabled.
2387 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2388 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391 if (m_bTnrEnabled && m_bTnrVideo &&
2392 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393 video_stream->width == preview_stream->width &&
2394 video_stream->height == preview_stream->height) {
2395 previewTnr[preview_stream_idx[i]] = true;
2396 }
2397 }
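    // Example: a 1920x1080 preview sharing a session with a 1920x1080 video
    // stream, with video TNR enabled and either EIS 2.0 or a front camera,
    // gets previewTnr set for that preview stream.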
2398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400 /* Allocate channel objects for the requested streams */
2401 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002402
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 camera3_stream_t *newStream = streamList->streams[i];
2404 uint32_t stream_usage = newStream->usage;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407 struct camera_info *p_info = NULL;
2408 pthread_mutex_lock(&gCamLock);
2409 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410 pthread_mutex_unlock(&gCamLock);
2411 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412 || IS_USAGE_ZSL(newStream->usage)) &&
2413 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417 if (bUseCommonFeatureMask)
2418 zsl_ppmask = commonFeatureMask;
2419 else
2420 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002422 if (numStreamsOnEncoder > 0)
2423 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424 else
2425 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 LOGH("Input stream configured, reprocess config");
2431 } else {
2432 //for non zsl streams find out the format
2433 switch (newStream->format) {
2434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439 /* add additional features to pp feature mask */
2440 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441 mStreamConfigInfo.num_streams);
2442
2443 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445 CAM_STREAM_TYPE_VIDEO;
2446 if (m_bTnrEnabled && m_bTnrVideo) {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448 CAM_QCOM_FEATURE_CPP_TNR;
2449 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QCOM_FEATURE_CDS;
2452 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002453 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QTI_FEATURE_PPEISCORE;
2456 }
Binhao Line406f062017-05-03 14:39:44 -07002457 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459 CAM_QCOM_FEATURE_GOOG_ZOOM;
2460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 } else {
2462 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002464 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466 CAM_QCOM_FEATURE_CPP_TNR;
2467 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QCOM_FEATURE_CDS;
2470 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if(!m_bSwTnrPreview) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473 ~CAM_QTI_FEATURE_SW_TNR;
2474 }
Binhao Line406f062017-05-03 14:39:44 -07002475 if (is_goog_zoom_preview_enabled) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477 CAM_QCOM_FEATURE_GOOG_ZOOM;
2478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 padding_info.width_padding = mSurfaceStridePadding;
2480 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002481 previewSize.width = (int32_t)newStream->width;
2482 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 }
2484 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487 newStream->height;
2488 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489 newStream->width;
2490 }
2491 }
2492 break;
2493 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497 if (bUseCommonFeatureMask)
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 commonFeatureMask;
2500 else
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_NONE;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506 }
2507 break;
2508 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* Remove rotation if it is not supported
2516 for 4K LiveVideo snapshot case (online processing) */
2517 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518 CAM_QCOM_FEATURE_ROTATION)) {
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520 &= ~CAM_QCOM_FEATURE_ROTATION;
2521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 } else {
2523 if (bUseCommonFeatureMask &&
2524 isOnEncoder(maxViewfinderSize, newStream->width,
2525 newStream->height)) {
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527 } else {
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 }
2530 }
2531 if (isZsl) {
2532 if (zslStream) {
2533 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534 (int32_t)zslStream->width;
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 } else {
2540 LOGE("Error, No ZSL stream identified");
2541 pthread_mutex_unlock(&mMutex);
2542 return -EINVAL;
2543 }
2544 } else if (m_bIs4KVideo) {
2545 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547 } else if (bYuv888OverrideJpeg) {
2548 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549 (int32_t)largeYuv888Size.width;
2550 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551 (int32_t)largeYuv888Size.height;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555 case HAL_PIXEL_FORMAT_RAW16:
2556 case HAL_PIXEL_FORMAT_RAW10:
2557 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567 gCamCapability[mCameraId]->dt[mPDIndex];
2568 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569 gCamCapability[mCameraId]->vc[mPDIndex];
2570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 break;
2572 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002573 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576 break;
2577 }
2578 }
2579
2580 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582 gCamCapability[mCameraId]->color_arrangement);
2583
2584 if (newStream->priv == NULL) {
2585 //New stream, construct channel
2586 switch (newStream->stream_type) {
2587 case CAMERA3_STREAM_INPUT:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2589 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; //WR for in-place algos
2590 break;
2591 case CAMERA3_STREAM_BIDIRECTIONAL:
2592 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593 GRALLOC_USAGE_HW_CAMERA_WRITE;
2594 break;
2595 case CAMERA3_STREAM_OUTPUT:
2596 /* For the video encoding stream, set the read/write-rarely
2597 * flags so that the buffers may be allocated as uncached */
2598 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599 newStream->usage |=
2600 (GRALLOC_USAGE_SW_READ_RARELY |
2601 GRALLOC_USAGE_SW_WRITE_RARELY |
2602 GRALLOC_USAGE_HW_CAMERA_WRITE);
2603 else if (IS_USAGE_ZSL(newStream->usage))
2604 {
2605 LOGD("ZSL usage flag skipping");
2606 }
2607 else if (newStream == zslStream
2608 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610 } else
2611 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612 break;
2613 default:
2614 LOGE("Invalid stream_type %d", newStream->stream_type);
2615 break;
2616 }
2617
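        // Channels are constructed below only for output and bidirectional
        // streams; forcePreviewUBWC defaults to true and is cleared later for
        // the preview-duplicates-video case.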
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621 QCamera3ProcessingChannel *channel = NULL;
2622 switch (newStream->format) {
2623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624 if ((newStream->usage &
2625 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626 (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628 ) {
2629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
2638 0); //heap buffers are not required for HFR video channel
2639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 //channel->getNumBuffers() will return 0 here so use
2645 //MAX_INFLIGHT_HFR_REQUESTS
2646 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647 newStream->priv = channel;
2648 LOGI("num video buffers in HFR mode: %d",
2649 MAX_INFLIGHT_HFR_REQUESTS);
2650 } else {
2651 /* Copy stream contents in HFR preview only case to create
2652 * dummy batch channel so that sensor streaming is in
2653 * HFR mode */
2654 if (!m_bIsVideo && (streamList->operation_mode ==
2655 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656 mDummyBatchStream = *newStream;
2657 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 int bufferCount = MAX_INFLIGHT_REQUESTS;
2659 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002661 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663 bufferCount = m_bIs4KVideo ?
2664 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665 }
2666
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
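                        // For illustration: with the EIS3 property set, a 4K
                        // video stream gets MAX_30FPS_VIDEO_BUFFERS and smaller
                        // video sizes get MAX_VIDEO_BUFFERS; all other streams
                        // keep the MAX_INFLIGHT_REQUESTS default from above.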
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002670 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 this,
2672 newStream,
2673 (cam_stream_type_t)
2674 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if (channel == NULL) {
2679 LOGE("allocation of channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return -ENOMEM;
2682 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 /* disable UBWC for preview, though supported,
2684 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002685 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002686 (previewSize.width == (int32_t)videoWidth)&&
2687 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002688 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002689 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002690 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002691 /* When goog_zoom is linked to the preview or video stream,
2692 * disable ubwc to the linked stream */
2693 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695 channel->setUBWCEnabled(false);
2696 }
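                        // Net effect: UBWC stays enabled for the preview stream
                        // unless preview mirrors the video resolution with video
                        // UBWC disabled, or the stream is linked to goog_zoom.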
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 newStream->max_buffers = channel->getNumBuffers();
2698 newStream->priv = channel;
2699 }
2700 break;
2701 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002705 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 this,
2707 newStream,
2708 (cam_stream_type_t)
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711 mMetadataChannel);
2712 if (channel == NULL) {
2713 LOGE("allocation of YUV channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = channel->getNumBuffers();
2718 newStream->priv = channel;
2719 break;
2720 }
2721 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002723 case HAL_PIXEL_FORMAT_RAW10: {
2724 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725 (HAL_DATASPACE_DEPTH != newStream->data_space))
2726 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 mRawChannel = new QCamera3RawChannel(
2728 mCameraHandle->camera_handle, mChannelHandle,
2729 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002730 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 this, newStream,
2732 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002733 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 if (mRawChannel == NULL) {
2735 LOGE("allocation of raw channel failed");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 newStream->max_buffers = mRawChannel->getNumBuffers();
2740 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002743 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002744 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745 mDepthChannel = new QCamera3DepthChannel(
2746 mCameraHandle->camera_handle, mChannelHandle,
2747 mCameraHandle->ops, NULL, NULL, &padding_info,
2748 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749 mMetadataChannel);
2750 if (NULL == mDepthChannel) {
2751 LOGE("Allocation of depth channel failed");
2752 pthread_mutex_unlock(&mMutex);
2753 return NO_MEMORY;
2754 }
2755 newStream->priv = mDepthChannel;
2756 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757 } else {
2758 // Max live snapshot inflight buffer is 1. This is to mitigate
2759 // frame drop issues for video snapshot. The more buffers being
2760 // allocated, the more frame drops there are.
2761 mPictureChannel = new QCamera3PicChannel(
2762 mCameraHandle->camera_handle, mChannelHandle,
2763 mCameraHandle->ops, captureResultCb,
2764 setBufferErrorStatus, &padding_info, this, newStream,
2765 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766 m_bIs4KVideo, isZsl, mMetadataChannel,
2767 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768 if (mPictureChannel == NULL) {
2769 LOGE("allocation of channel failed");
2770 pthread_mutex_unlock(&mMutex);
2771 return -ENOMEM;
2772 }
2773 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774 newStream->max_buffers = mPictureChannel->getNumBuffers();
2775 mPictureChannel->overrideYuvSize(
2776 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 break;
2780
2781 default:
2782 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002783 pthread_mutex_unlock(&mMutex);
2784 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 }
2786 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788 } else {
2789 LOGE("Error, Unknown stream type");
2790 pthread_mutex_unlock(&mMutex);
2791 return -EINVAL;
2792 }
2793
2794 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002796 // Here we only care whether it's EIS3 or not
2797 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002801 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002803 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806 }
2807 }
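            // If the stream's default format resolves to NV12 UBWC, the gralloc
            // private UBWC usage bit is set above so that allocated buffers
            // match the format the channel is expected to produce.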
2808
2809 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810 it != mStreamInfo.end(); it++) {
2811 if ((*it)->stream == newStream) {
2812 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813 break;
2814 }
2815 }
2816 } else {
2817 // Channel already exists for this stream
2818 // Do nothing for now
2819 }
2820 padding_info = gCamCapability[mCameraId]->padding_info;
2821
Emilian Peev7650c122017-01-19 08:24:33 -08002822 /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002823 * since there is no real stream associated with them
2824 */
Emilian Peev7650c122017-01-19 08:24:33 -08002825 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002826 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 }
2831
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002832 // Let buffer dispatcher know the configured streams.
2833 mOutputBufferDispatcher.configureStreams(streamList);
2834
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836 onlyRaw = false;
2837 }
2838
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002839 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002840 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002842 cam_analysis_info_t analysisInfo;
2843 int32_t ret = NO_ERROR;
2844 ret = mCommon.getAnalysisInfo(
2845 FALSE,
2846 analysisFeatureMask,
2847 &analysisInfo);
2848 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002855 cam_dimension_t analysisDim;
2856 analysisDim = mCommon.getMatchingDimension(previewSize,
2857 analysisInfo.analysis_recommended_res);
2858
2859 mAnalysisChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &analysisInfo.analysis_padding_info,
2864 analysisFeatureMask,
2865 CAM_STREAM_TYPE_ANALYSIS,
2866 &analysisDim,
2867 (analysisInfo.analysis_format
2868 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869 : CAM_FORMAT_YUV_420_NV21),
2870 analysisInfo.hw_analysis_supported,
2871 gCamCapability[mCameraId]->color_arrangement,
2872 this,
2873 0); // force buffer count to 0
2874 } else {
2875 LOGW("getAnalysisInfo failed, ret = %d", ret);
2876 }
2877 if (!mAnalysisChannel) {
2878 LOGW("Analysis channel cannot be created");
2879 }
2880 }
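    // Failure to create the analysis channel is treated as non-fatal here;
    // only a warning is logged and stream configuration continues.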
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 //RAW DUMP channel
2883 if (mEnableRawDump && isRawStreamRequested == false){
2884 cam_dimension_t rawDumpSize;
2885 rawDumpSize = getMaxRawSize(mCameraId);
2886 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887 setPAAFSupport(rawDumpFeatureMask,
2888 CAM_STREAM_TYPE_RAW,
2889 gCamCapability[mCameraId]->color_arrangement);
2890 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891 mChannelHandle,
2892 mCameraHandle->ops,
2893 rawDumpSize,
2894 &padding_info,
2895 this, rawDumpFeatureMask);
2896 if (!mRawDumpChannel) {
2897 LOGE("Raw Dump channel cannot be created");
2898 pthread_mutex_unlock(&mMutex);
2899 return -ENOMEM;
2900 }
2901 }
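    // The raw dump channel is created only when raw dump is enabled and the
    // client did not request its own RAW stream, so it should not duplicate an
    // application RAW output.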
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 if (mAnalysisChannel) {
2904 cam_analysis_info_t analysisInfo;
2905 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907 CAM_STREAM_TYPE_ANALYSIS;
2908 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 &analysisInfo);
2913 if (rc != NO_ERROR) {
2914 LOGE("getAnalysisInfo failed, ret = %d", rc);
2915 pthread_mutex_unlock(&mMutex);
2916 return rc;
2917 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002918 cam_color_filter_arrangement_t analysis_color_arrangement =
2919 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920 CAM_FILTER_ARRANGEMENT_Y :
2921 gCamCapability[mCameraId]->color_arrangement);
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 analysis_color_arrangement);
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002927 mCommon.getMatchingDimension(previewSize,
2928 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mStreamConfigInfo.num_streams++;
2930 }
2931
Thierry Strudel2896d122017-02-23 19:18:03 -08002932 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 cam_analysis_info_t supportInfo;
2934 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(callbackFeatureMask,
2937 CAM_STREAM_TYPE_CALLBACK,
2938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002939 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002940 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 if (ret != NO_ERROR) {
2942 /* Ignore the error for Mono camera
2943 * because the PAAF bit mask is only set
2944 * for CAM_STREAM_TYPE_ANALYSIS stream type
2945 */
2946 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 }
2950 mSupportChannel = new QCamera3SupportChannel(
2951 mCameraHandle->camera_handle,
2952 mChannelHandle,
2953 mCameraHandle->ops,
2954 &gCamCapability[mCameraId]->padding_info,
2955 callbackFeatureMask,
2956 CAM_STREAM_TYPE_CALLBACK,
2957 &QCamera3SupportChannel::kDim,
2958 CAM_FORMAT_YUV_420_NV21,
2959 supportInfo.hw_analysis_supported,
2960 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002961 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002962 if (!mSupportChannel) {
2963 LOGE("dummy channel cannot be created");
2964 pthread_mutex_unlock(&mMutex);
2965 return -ENOMEM;
2966 }
2967 }
2968
2969 if (mSupportChannel) {
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971 QCamera3SupportChannel::kDim;
2972 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973 CAM_STREAM_TYPE_CALLBACK;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mStreamConfigInfo.num_streams++;
2980 }
2981
2982 if (mRawDumpChannel) {
2983 cam_dimension_t rawSize;
2984 rawSize = getMaxRawSize(mCameraId);
2985 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986 rawSize;
2987 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988 CAM_STREAM_TYPE_RAW;
2989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990 CAM_QCOM_FEATURE_NONE;
2991 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mStreamConfigInfo.num_streams++;
2995 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002996
2997 if (mHdrPlusRawSrcChannel) {
2998 cam_dimension_t rawSize;
2999 rawSize = getMaxRawSize(mCameraId);
3000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005 gCamCapability[mCameraId]->color_arrangement);
3006 mStreamConfigInfo.num_streams++;
3007 }
3008
Thierry Strudel3d639192016-09-09 11:52:26 -07003009 /* In HFR mode, if video stream is not added, create a dummy channel so that
3010 * ISP can create a batch mode even for preview only case. This channel is
3011 * never 'start'ed (no stream-on), it is only 'initialized' */
3012 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013 !m_bIsVideo) {
3014 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(dummyFeatureMask,
3016 CAM_STREAM_TYPE_VIDEO,
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019 mChannelHandle,
3020 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003021 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 this,
3023 &mDummyBatchStream,
3024 CAM_STREAM_TYPE_VIDEO,
3025 dummyFeatureMask,
3026 mMetadataChannel);
3027 if (NULL == mDummyBatchChannel) {
3028 LOGE("creation of mDummyBatchChannel failed."
3029 "Preview will use non-hfr sensor mode ");
3030 }
3031 }
3032 if (mDummyBatchChannel) {
3033 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034 mDummyBatchStream.width;
3035 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036 mDummyBatchStream.height;
3037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038 CAM_STREAM_TYPE_VIDEO;
3039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043 gCamCapability[mCameraId]->color_arrangement);
3044 mStreamConfigInfo.num_streams++;
3045 }
3046
3047 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003049 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003050 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
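    // Read with ternary right-associativity: 4K video forces max_buffers to 0,
    // otherwise EIS3 video uses MAX_VIDEO_BUFFERS and all remaining cases use
    // MAX_INFLIGHT_REQUESTS.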
Thierry Strudel3d639192016-09-09 11:52:26 -07003051
3052 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3053 for (pendingRequestIterator i = mPendingRequestsList.begin();
3054 i != mPendingRequestsList.end();) {
3055 i = erasePendingRequest(i);
3056 }
3057 mPendingFrameDropList.clear();
3058 // Initialize/Reset the pending buffers list
3059 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060 req.mPendingBufferList.clear();
3061 }
3062 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003063 mExpectedInflightDuration = 0;
3064 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003065
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 mCurJpegMeta.clear();
3067 //Get min frame duration for this streams configuration
3068 deriveMinFrameDuration();
3069
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070 mFirstPreviewIntentSeen = false;
3071
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 // Update state
3073 mState = CONFIGURED;
3074
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003075 mFirstMetadataCallback = true;
3076
Thierry Strudel3d639192016-09-09 11:52:26 -07003077 pthread_mutex_unlock(&mMutex);
3078
3079 return rc;
3080}
3081
3082/*===========================================================================
3083 * FUNCTION : validateCaptureRequest
3084 *
3085 * DESCRIPTION: validate a capture request from camera service
3086 *
3087 * PARAMETERS :
3088 * @request : request from framework to process
3089 *
3090 * RETURN :
3091 *
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 camera3_capture_request_t *request,
3095 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003096{
3097 ssize_t idx = 0;
3098 const camera3_stream_buffer_t *b;
3099 CameraMetadata meta;
3100
3101 /* Sanity check the request */
3102 if (request == NULL) {
3103 LOGE("NULL capture request");
3104 return BAD_VALUE;
3105 }
3106
3107 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108 /*settings cannot be null for the first request*/
3109 return BAD_VALUE;
3110 }
3111
3112 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003115 LOGE("Request %d: No output buffers provided!",
3116 frameNumber);
3117 return BAD_VALUE;
3118 }
3119 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3120 LOGE("Number of buffers %d equals or is greater than maximum number of streams!",
3121 request->num_output_buffers, MAX_NUM_STREAMS);
3122 return BAD_VALUE;
3123 }
3124 if (request->input_buffer != NULL) {
3125 b = request->input_buffer;
3126 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127 LOGE("Request %d: Buffer %ld: Status not OK!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->release_fence != -1) {
3132 LOGE("Request %d: Buffer %ld: Has a release fence!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (b->buffer == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 }
3142
3143 // Validate all buffers
3144 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003145 if (b == NULL) {
3146 return BAD_VALUE;
3147 }
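    // Every output buffer must reference a configured stream, have status
    // CAMERA3_BUFFER_STATUS_OK, carry no release fence, and provide non-NULL
    // buffer and private handles; any violation rejects the whole request.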
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003148 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 QCamera3ProcessingChannel *channel =
3150 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151 if (channel == NULL) {
3152 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157 LOGE("Request %d: Buffer %ld: Status not OK!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 if (b->release_fence != -1) {
3162 LOGE("Request %d: Buffer %ld: Has a release fence!",
3163 frameNumber, (long)idx);
3164 return BAD_VALUE;
3165 }
3166 if (b->buffer == NULL) {
3167 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168 frameNumber, (long)idx);
3169 return BAD_VALUE;
3170 }
3171 if (*(b->buffer) == NULL) {
3172 LOGE("Request %d: Buffer %ld: NULL private handle!",
3173 frameNumber, (long)idx);
3174 return BAD_VALUE;
3175 }
3176 idx++;
3177 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003179 return NO_ERROR;
3180}
3181
3182/*===========================================================================
3183 * FUNCTION : deriveMinFrameDuration
3184 *
3185 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186 * on currently configured streams.
3187 *
3188 * PARAMETERS : NONE
3189 *
3190 * RETURN : NONE
3191 *
3192 *==========================================================================*/
3193void QCamera3HardwareInterface::deriveMinFrameDuration()
3194{
3195 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003196 bool hasRaw = false;
3197
3198 mMinRawFrameDuration = 0;
3199 mMinJpegFrameDuration = 0;
3200 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003201
3202 maxJpegDim = 0;
3203 maxProcessedDim = 0;
3204 maxRawDim = 0;
3205
3206 // Figure out maximum jpeg, processed, and raw dimensions
3207 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208 it != mStreamInfo.end(); it++) {
3209
3210 // Input stream doesn't have valid stream_type
3211 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212 continue;
3213
3214 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216 if (dimension > maxJpegDim)
3217 maxJpegDim = dimension;
3218 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003221 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003222 if (dimension > maxRawDim)
3223 maxRawDim = dimension;
3224 } else {
3225 if (dimension > maxProcessedDim)
3226 maxProcessedDim = dimension;
3227 }
3228 }
3229
3230 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231 MAX_SIZES_CNT);
3232
3233 //Assume all jpeg dimensions are in processed dimensions.
3234 if (maxJpegDim > maxProcessedDim)
3235 maxProcessedDim = maxJpegDim;
3236 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003237 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 maxRawDim = INT32_MAX;
3239
3240 for (size_t i = 0; i < count; i++) {
3241 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242 gCamCapability[mCameraId]->raw_dim[i].height;
3243 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244 maxRawDim = dimension;
3245 }
3246 }
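    // For illustration (hypothetical sizes): with a 12MP processed/JPEG maximum
    // and raw sizes of 10MP, 13MP and 16MP, maxRawDim settles on 13MP, the
    // smallest raw dimension that still covers the processed output.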
3247
3248 //Find minimum durations for processed, jpeg, and raw
3249 for (size_t i = 0; i < count; i++) {
3250 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251 gCamCapability[mCameraId]->raw_dim[i].height) {
3252 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253 break;
3254 }
3255 }
3256 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257 for (size_t i = 0; i < count; i++) {
3258 if (maxProcessedDim ==
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 break;
3264 }
3265 }
3266}
3267
3268/*===========================================================================
3269 * FUNCTION : getMinFrameDuration
3270 *
3271 * DESCRIPTION: get minimum frame duration based on the currently derived
3272 * minimum frame durations and the current request configuration.
3273 *
3274 * PARAMETERS : @request: request sent by the framework
3275 *
3276 * RETURN : min frame duration for a particular request
3277 *
3278 *==========================================================================*/
3279int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280{
3281 bool hasJpegStream = false;
3282 bool hasRawStream = false;
3283 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284 const camera3_stream_t *stream = request->output_buffers[i].stream;
3285 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286 hasJpegStream = true;
3287 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290 hasRawStream = true;
3291 }
3292
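    // Without a JPEG stream, the request is paced by the slower of the raw and
    // processed minimum durations; a JPEG stream additionally folds in the JPEG
    // minimum duration.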
3293 if (!hasJpegStream)
3294 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295 else
3296 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297}
3298
3299/*===========================================================================
3300 * FUNCTION : handleBuffersDuringFlushLock
3301 *
3302 * DESCRIPTION: Account for buffers returned from back-end during flush
3303 * This function is executed while mMutex is held by the caller.
3304 *
3305 * PARAMETERS :
3306 * @buffer: image buffer for the callback
3307 *
3308 * RETURN :
3309 *==========================================================================*/
3310void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311{
3312 bool buffer_found = false;
3313 for (List<PendingBuffersInRequest>::iterator req =
3314 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316 for (List<PendingBufferInfo>::iterator i =
3317 req->mPendingBufferList.begin();
3318 i != req->mPendingBufferList.end(); i++) {
3319 if (i->buffer == buffer->buffer) {
3320 mPendingBuffersMap.numPendingBufsAtFlush--;
3321 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322 buffer->buffer, req->frame_number,
3323 mPendingBuffersMap.numPendingBufsAtFlush);
3324 buffer_found = true;
3325 break;
3326 }
3327 }
3328 if (buffer_found) {
3329 break;
3330 }
3331 }
3332 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333 //signal the flush()
3334 LOGD("All buffers returned to HAL. Continue flush");
3335 pthread_cond_signal(&mBuffersCond);
3336 }
3337}
3338
Thierry Strudel3d639192016-09-09 11:52:26 -07003339/*===========================================================================
3340 * FUNCTION : handleBatchMetadata
3341 *
3342 * DESCRIPTION: Handles metadata buffer callback in batch mode
3343 *
3344 * PARAMETERS : @metadata_buf: metadata buffer
3345 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346 * the meta buf in this method
3347 *
3348 * RETURN :
3349 *
3350 *==========================================================================*/
3351void QCamera3HardwareInterface::handleBatchMetadata(
3352 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355
3356 if (NULL == metadata_buf) {
3357 LOGE("metadata_buf is NULL");
3358 return;
3359 }
3360 /* In batch mode, the metadata will contain the frame number and timestamp of
3361 * the last frame in the batch. Eg: a batch containing buffers from request
3362 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3363 * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3364 * multiple process_capture_results */
3365 metadata_buffer_t *metadata =
3366 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370 uint32_t frame_number = 0, urgent_frame_number = 0;
3371 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372 bool invalid_metadata = false;
3373 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003376
3377 int32_t *p_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381 int64_t *p_capture_time =
3382 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383 int32_t *p_urgent_frame_number_valid =
3384 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385 uint32_t *p_urgent_frame_number =
3386 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387
3388 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390 (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 invalid_metadata = true;
3393 } else {
3394 frame_number_valid = *p_frame_number_valid;
3395 last_frame_number = *p_frame_number;
3396 last_frame_capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 last_urgent_frame_number = *p_urgent_frame_number;
3399 }
3400
3401 /* In batch mode, when no video buffers are requested, set_parms are sent
3402 * for every capture_request. The difference between consecutive urgent
3403 * frame numbers and frame numbers should be used to interpolate the
3404 * corresponding frame numbers and time stamps */
3405 pthread_mutex_lock(&mMutex);
3406 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003407 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408 if(idx < 0) {
3409 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410 last_urgent_frame_number);
3411 mState = ERROR;
3412 pthread_mutex_unlock(&mMutex);
3413 return;
3414 }
3415 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417 first_urgent_frame_number;
3418
3419 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420 urgent_frame_number_valid,
3421 first_urgent_frame_number, last_urgent_frame_number);
3422 }
3423
3424 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003425 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426 if(idx < 0) {
3427 LOGE("Invalid frame number received: %d. Irrecoverable error",
3428 last_frame_number);
3429 mState = ERROR;
3430 pthread_mutex_unlock(&mMutex);
3431 return;
3432 }
3433 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003434 frameNumDiff = last_frame_number + 1 -
3435 first_frame_number;
3436 mPendingBatchMap.removeItem(last_frame_number);
3437
3438 LOGD("frm: valid: %d frm_num: %d - %d",
3439 frame_number_valid,
3440 first_frame_number, last_frame_number);
3441
3442 }
3443 pthread_mutex_unlock(&mMutex);
3444
3445 if (urgent_frame_number_valid || frame_number_valid) {
3446 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449 urgentFrameNumDiff, last_urgent_frame_number);
3450 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("frameNumDiff: %d frameNum: %d",
3452 frameNumDiff, last_frame_number);
3453 }
3454
3455 for (size_t i = 0; i < loopCount; i++) {
3456 /* handleMetadataWithLock is called even for invalid_metadata for
3457 * pipeline depth calculation */
3458 if (!invalid_metadata) {
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (urgent_frame_number_valid) {
3462 if (i < urgentFrameNumDiff) {
3463 urgent_frame_number =
3464 first_urgent_frame_number + i;
3465 LOGD("inferred urgent frame_number: %d",
3466 urgent_frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 /* Infer frame number. Batch metadata contains frame number of the
3477 * last frame */
3478 if (frame_number_valid) {
3479 if (i < frameNumDiff) {
3480 frame_number = first_frame_number + i;
3481 LOGD("inferred frame_number: %d", frame_number);
3482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_FRAME_NUMBER, frame_number);
3484 } else {
3485 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488 }
3489 }
3490
3491 if (last_frame_capture_time) {
3492 //Infer timestamp
3493 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499 LOGD("batch capture_time: %lld, capture_time: %lld",
3500 last_frame_capture_time, capture_time);
3501 }
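                // For illustration (hypothetical batch): with a loopCount of 4
                // at 120 fps HFR, frame i is stamped roughly
                // last_frame_capture_time - (3 - i) * NSEC_PER_SEC / 120,
                // spreading the batch evenly across the capture interval.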
3502 }
3503 pthread_mutex_lock(&mMutex);
3504 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003506 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003508 &is_metabuf_queued /* whether the meta buf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 pthread_mutex_unlock(&mMutex);
3510 }
3511
3512 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003513 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 mMetadataChannel->bufDone(metadata_buf);
3515 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518}
3519
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521 camera3_error_msg_code_t errorCode)
3522{
3523 camera3_notify_msg_t notify_msg;
3524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525 notify_msg.type = CAMERA3_MSG_ERROR;
3526 notify_msg.message.error.error_code = errorCode;
3527 notify_msg.message.error.error_stream = NULL;
3528 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003529 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530
3531 return;
3532}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533
3534/*===========================================================================
3535 * FUNCTION : sendPartialMetadataWithLock
3536 *
3537 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @metadata: metadata buffer
3540 * @requestIter: The iterator for the pending capture request for
3541 * which the partial result is being sent
3542 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3543 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003544 * @isJumpstartMetadata: Whether this is a partial metadata for
3545 * jumpstart, i.e. even though it doesn't map to a valid partial
3546 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 *
3548 * RETURN :
3549 *
3550 *==========================================================================*/
3551
3552void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553 metadata_buffer_t *metadata,
3554 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003555 bool lastUrgentMetadataInBatch,
3556 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003557{
3558 camera3_capture_result_t result;
3559 memset(&result, 0, sizeof(camera3_capture_result_t));
3560
3561 requestIter->partial_result_cnt++;
3562
3563 // Extract 3A metadata
3564 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003565 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003567 // Populate metadata result
3568 result.frame_number = requestIter->frame_number;
3569 result.num_output_buffers = 0;
3570 result.output_buffers = NULL;
3571 result.partial_result = requestIter->partial_result_cnt;
3572
3573 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003574 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003575 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576 // Notify HDR+ client about the partial metadata.
3577 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578 result.partial_result == PARTIAL_RESULT_COUNT);
3579 }
3580 }
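    // The HDR+ notification above runs in its own scope so gHdrPlusClientLock
    // is released before the partial result is handed to orchestrateResult().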
3581
3582 orchestrateResult(&result);
3583 LOGD("urgent frame_number = %u", result.frame_number);
3584 free_camera_metadata((camera_metadata_t *)result.result);
3585}
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587/*===========================================================================
3588 * FUNCTION : handleMetadataWithLock
3589 *
3590 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591 *
3592 * PARAMETERS : @metadata_buf: metadata buffer
3593 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3594 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003595 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596 * last urgent metadata in a batch. Always true for non-batch mode
3597 * @lastMetadataInBatch: Boolean to indicate whether this is the
3598 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003599 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 *
3602 * RETURN :
3603 *
3604 *==========================================================================*/
3605void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003606 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003607 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3608 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003609{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003610 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3612 //during flush do not send metadata from this thread
3613 LOGD("not sending metadata during flush or when mState is error");
3614 if (free_and_bufdone_meta_buf) {
3615 mMetadataChannel->bufDone(metadata_buf);
3616 free(metadata_buf);
3617 }
3618 return;
3619 }
3620
3621 //not in flush
3622 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3623 int32_t frame_number_valid, urgent_frame_number_valid;
3624 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003625 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 nsecs_t currentSysTime;
3627
3628 int32_t *p_frame_number_valid =
3629 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3630 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003632 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 int32_t *p_urgent_frame_number_valid =
3634 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3635 uint32_t *p_urgent_frame_number =
3636 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3637 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3638 metadata) {
3639 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3640 *p_frame_number_valid, *p_frame_number);
3641 }
3642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 camera_metadata_t *resultMetadata = nullptr;
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3646 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3647 LOGE("Invalid metadata");
3648 if (free_and_bufdone_meta_buf) {
3649 mMetadataChannel->bufDone(metadata_buf);
3650 free(metadata_buf);
3651 }
3652 goto done_metadata;
3653 }
3654 frame_number_valid = *p_frame_number_valid;
3655 frame_number = *p_frame_number;
3656 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003657 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3659 urgent_frame_number = *p_urgent_frame_number;
3660 currentSysTime = systemTime(CLOCK_MONOTONIC);
3661
Jason Lee603176d2017-05-31 11:43:27 -07003662 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3663 const int tries = 3;
3664 nsecs_t bestGap, measured;
3665 for (int i = 0; i < tries; ++i) {
3666 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3667 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3668 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3669 const nsecs_t gap = tmono2 - tmono;
3670 if (i == 0 || gap < bestGap) {
3671 bestGap = gap;
3672 measured = tbase - ((tmono + tmono2) >> 1);
3673 }
3674 }
3675 capture_time -= measured;
3676 }
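    // If the capability reports uncalibrated sensor timestamps, the loop above
    // estimates the BOOTTIME-to-MONOTONIC offset by bracketing a BOOTTIME read
    // between two MONOTONIC reads, keeps the attempt with the smallest bracket,
    // and shifts capture_time onto the MONOTONIC base.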
3677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 // Detect if buffers from any requests are overdue
3679 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 int64_t timeout;
3681 {
3682 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3683 // If there is a pending HDR+ request, the following requests may be blocked until the
3684 // HDR+ request is done. So allow a longer timeout.
3685 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3686 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003687 if (timeout < mExpectedInflightDuration) {
3688 timeout = mExpectedInflightDuration;
3689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 }
3691
3692 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003694 assert(missed.stream->priv);
3695 if (missed.stream->priv) {
3696 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3697 assert(ch->mStreams[0]);
3698 if (ch->mStreams[0]) {
3699 LOGE("Cancel missing frame = %d, buffer = %p,"
3700 "stream type = %d, stream format = %d",
3701 req.frame_number, missed.buffer,
3702 ch->mStreams[0]->getMyType(), missed.stream->format);
3703 ch->timeoutFrame(req.frame_number);
3704 }
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 }
3707 }
3708 }
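    // The overdue check above allows a longer deadline while HDR+ requests are
    // pending and never lets it drop below the expected in-flight duration;
    // offending frames are timed out per channel rather than failed here.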
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003709 //For the very first metadata callback, regardless of whether it contains a valid
3710 //frame number, send the partial metadata for the jumpstarting requests.
3711 //Note that this has to be done even if the metadata doesn't contain valid
3712 //urgent frame number, because in the case only 1 request is ever submitted
3713 //to HAL, there won't be subsequent valid urgent frame number.
3714 if (mFirstMetadataCallback) {
3715 for (pendingRequestIterator i =
3716 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3717 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003718 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3719 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 }
3721 }
3722 mFirstMetadataCallback = false;
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 //Partial result on process_capture_result for timestamp
3726 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
3729 //Received an urgent frame number, handle it
3730 //using partial results
3731 for (pendingRequestIterator i =
3732 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3733 LOGD("Iterator Frame = %d urgent frame = %d",
3734 i->frame_number, urgent_frame_number);
3735
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003736 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003737 (i->partial_result_cnt == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003738 LOGE("Error: HAL missed urgent metadata for frame number %d",
3739 i->frame_number);
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003740 i->partialResultDropped = true;
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003741 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003742 }
3743
3744 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003745 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003746 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3747 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003748 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3749 // Instant AEC settled for this frame.
3750 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3751 mInstantAECSettledFrameNumber = urgent_frame_number;
3752 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003753 break;
3754 }
3755 }
3756 }
3757
3758 if (!frame_number_valid) {
3759 LOGD("Not a valid normal frame number, used as SOF only");
3760 if (free_and_bufdone_meta_buf) {
3761 mMetadataChannel->bufDone(metadata_buf);
3762 free(metadata_buf);
3763 }
3764 goto done_metadata;
3765 }
3766 LOGH("valid frame_number = %u, capture_time = %lld",
3767 frame_number, capture_time);
3768
Emilian Peev4e0fe952017-06-30 12:40:09 -07003769 handleDepthDataLocked(metadata->depth_data, frame_number,
3770 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003771
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003772 // Check whether any stream buffer corresponding to this is dropped or not
3773 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3774 // OR, if instant AEC is enabled, drop frames until AEC is settled.
3775 for (auto & pendingRequest : mPendingRequestsList) {
3776 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3777 mInstantAECSettledFrameNumber)) {
3778 camera3_notify_msg_t notify_msg = {};
3779 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003780 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003781 QCamera3ProcessingChannel *channel =
3782 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003783 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003784 if (p_cam_frame_drop) {
3785 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003786 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003787 // Got the stream ID for drop frame.
3788 dropFrame = true;
3789 break;
3790 }
3791 }
3792 } else {
3793 // This is instant AEC case.
3794 // For instant AEC, drop the stream until AEC is settled.
3795 dropFrame = true;
3796 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003797
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003798 if (dropFrame) {
3799 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3800 if (p_cam_frame_drop) {
3801 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003802 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003803 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003804 } else {
3805 // For instant AEC, inform frame drop and frame number
3806 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3807 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003808 pendingRequest.frame_number, streamID,
3809 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003810 }
3811 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003813 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003814 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003815 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003816 if (p_cam_frame_drop) {
3817 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003818 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003819 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003820 } else {
3821 // For instant AEC, inform frame drop and frame number
3822 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3823 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003824 pendingRequest.frame_number, streamID,
3825 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003826 }
3827 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003828 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003829 PendingFrameDrop.stream_ID = streamID;
3830 // Add the Frame drop info to mPendingFrameDropList
3831 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003832 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003833 }
3834 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003835 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003836
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003837 for (auto & pendingRequest : mPendingRequestsList) {
3838 // Find the pending request with the frame number.
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003839 if (pendingRequest.frame_number < frame_number) {
3840 // Workaround for case where shutter is missing due to dropped
3841 // metadata
Emilian Peev7b0175d2017-09-29 12:57:31 +01003842 if (!pendingRequest.hdrplus && (pendingRequest.input_buffer == nullptr)) {
Chien-Yu Chen0469c9b2017-09-22 13:22:19 -07003843 mShutterDispatcher.markShutterReady(pendingRequest.frame_number, capture_time);
3844 }
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07003845 } else if (pendingRequest.frame_number == frame_number) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003846 // Update the sensor timestamp.
3847 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003848
Thierry Strudel3d639192016-09-09 11:52:26 -07003849
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003850 /* Set the timestamp in display metadata so that clients aware of
3851 private_handle, such as VT, can use these unmodified timestamps.
3852 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003853 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003854
Thierry Strudel3d639192016-09-09 11:52:26 -07003855 // Find channel requiring metadata, meaning internal offline postprocess
3856 // is needed.
3857 //TODO: for now, we don't support two streams requiring metadata at the same time.
3858 // (because we are not making copies, and the metadata buffer is not reference counted).
3859 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003860 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3861 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003862 if (iter->need_metadata) {
3863 internalPproc = true;
3864 QCamera3ProcessingChannel *channel =
3865 (QCamera3ProcessingChannel *)iter->stream->priv;
3866 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003867 if(p_is_metabuf_queued != NULL) {
3868 *p_is_metabuf_queued = true;
3869 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003870 break;
3871 }
3872 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003873 for (auto itr = pendingRequest.internalRequestList.begin();
3874 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003875 if (itr->need_metadata) {
3876 internalPproc = true;
3877 QCamera3ProcessingChannel *channel =
3878 (QCamera3ProcessingChannel *)itr->stream->priv;
3879 channel->queueReprocMetadata(metadata_buf);
3880 break;
3881 }
3882 }
3883
Thierry Strudel54dc9782017-02-15 12:12:10 -08003884 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003885
3886 bool *enableZsl = nullptr;
3887 if (gExposeEnableZslKey) {
3888 enableZsl = &pendingRequest.enableZsl;
3889 }
3890
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003891 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003892 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003893 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003894
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003895 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003896
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003897 if (pendingRequest.blob_request) {
3898 //Dump tuning metadata if enabled and available
3899 char prop[PROPERTY_VALUE_MAX];
3900 memset(prop, 0, sizeof(prop));
3901 property_get("persist.camera.dumpmetadata", prop, "0");
3902 int32_t enabled = atoi(prop);
3903 if (enabled && metadata->is_tuning_params_valid) {
3904 dumpMetadataToFile(metadata->tuning_params,
3905 mMetaFrameCount,
3906 enabled,
3907 "Snapshot",
3908 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003909 }
3910 }
3911
3912 if (!internalPproc) {
3913 LOGD("couldn't find need_metadata for this metadata");
3914 // Return metadata buffer
3915 if (free_and_bufdone_meta_buf) {
3916 mMetadataChannel->bufDone(metadata_buf);
3917 free(metadata_buf);
3918 }
3919 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003920
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003921 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003922 }
3923 }
3924
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003925 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3926
3927 // Try to send out capture result metadata.
3928 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003929 return;
3930
Thierry Strudel3d639192016-09-09 11:52:26 -07003931done_metadata:
3932 for (pendingRequestIterator i = mPendingRequestsList.begin();
3933 i != mPendingRequestsList.end() ;i++) {
3934 i->pipeline_depth++;
3935 }
3936 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3937 unblockRequestIfNecessary();
3938}
3939
3940/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003941 * FUNCTION : handleDepthDataLocked
3942 *
3943 * DESCRIPTION: Handles incoming depth data
3944 *
3945 * PARAMETERS : @depthData : Depth data
3946 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003947 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003948 *
3949 * RETURN :
3950 *
3951 *==========================================================================*/
3952void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003953 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003954 uint32_t currentFrameNumber;
3955 buffer_handle_t *depthBuffer;
3956
3957 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003958 return;
3959 }
3960
3961 camera3_stream_buffer_t resultBuffer =
3962 {.acquire_fence = -1,
3963 .release_fence = -1,
3964 .status = CAMERA3_BUFFER_STATUS_OK,
3965 .buffer = nullptr,
3966 .stream = mDepthChannel->getStream()};
Emilian Peev7650c122017-01-19 08:24:33 -08003967 do {
3968 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3969 if (nullptr == depthBuffer) {
3970 break;
3971 }
3972
Emilian Peev7650c122017-01-19 08:24:33 -08003973 resultBuffer.buffer = depthBuffer;
3974 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003975 if (valid) {
3976 int32_t rc = mDepthChannel->populateDepthData(depthData,
3977 frameNumber);
3978 if (NO_ERROR != rc) {
3979 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3980 } else {
3981 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3982 }
Emilian Peev7650c122017-01-19 08:24:33 -08003983 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003984 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003985 }
3986 } else if (currentFrameNumber > frameNumber) {
3987 break;
3988 } else {
3989 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3990 {{currentFrameNumber, mDepthChannel->getStream(),
3991 CAMERA3_MSG_ERROR_BUFFER}}};
3992 orchestrateNotify(&notify_msg);
3993
3994 LOGE("Depth buffer for frame number: %d is missing "
3995 "returning back!", currentFrameNumber);
3996 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3997 }
3998 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003999 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004000 } while (currentFrameNumber < frameNumber);
4001}
4002
4003/*===========================================================================
4004 * FUNCTION : notifyErrorFoPendingDepthData
4005 *
4006 * DESCRIPTION: Returns error for any pending depth buffers
4007 *
4008 * PARAMETERS : depthCh - depth channel that needs to get flushed
4009 *
4010 * RETURN :
4011 *
4012 *==========================================================================*/
4013void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4014 QCamera3DepthChannel *depthCh) {
4015 uint32_t currentFrameNumber;
4016 buffer_handle_t *depthBuffer;
4017
4018 if (nullptr == depthCh) {
4019 return;
4020 }
4021
4022 camera3_notify_msg_t notify_msg =
4023 {.type = CAMERA3_MSG_ERROR,
4024 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4025 camera3_stream_buffer_t resultBuffer =
4026 {.acquire_fence = -1,
4027 .release_fence = -1,
4028 .buffer = nullptr,
4029 .stream = depthCh->getStream(),
4030 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004031
4032 while (nullptr !=
4033 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4034 depthCh->unmapBuffer(currentFrameNumber);
4035
4036 notify_msg.message.error.frame_number = currentFrameNumber;
4037 orchestrateNotify(&notify_msg);
4038
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004039 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004040 };
4041}
4042
4043/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004044 * FUNCTION : hdrPlusPerfLock
4045 *
4046 * DESCRIPTION: perf lock for HDR+ using custom intent
4047 *
4048 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4049 *
4050 * RETURN : None
4051 *
4052 *==========================================================================*/
4053void QCamera3HardwareInterface::hdrPlusPerfLock(
4054 mm_camera_super_buf_t *metadata_buf)
4055{
4056 if (NULL == metadata_buf) {
4057 LOGE("metadata_buf is NULL");
4058 return;
4059 }
4060 metadata_buffer_t *metadata =
4061 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4062 int32_t *p_frame_number_valid =
4063 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4064 uint32_t *p_frame_number =
4065 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4066
4067 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4068 LOGE("%s: Invalid metadata", __func__);
4069 return;
4070 }
4071
Wei Wang01385482017-08-03 10:49:34 -07004072 //acquire perf lock for 2 secs after the last HDR frame is captured
4073 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004074 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4075 if ((p_frame_number != NULL) &&
4076 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004077 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004078 }
4079 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004080}
4081
4082/*===========================================================================
4083 * FUNCTION : handleInputBufferWithLock
4084 *
4085 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4086 *
4087 * PARAMETERS : @frame_number: frame number of the input buffer
4088 *
4089 * RETURN :
4090 *
4091 *==========================================================================*/
4092void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4093{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004094 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004095 pendingRequestIterator i = mPendingRequestsList.begin();
4096 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4097 i++;
4098 }
4099 if (i != mPendingRequestsList.end() && i->input_buffer) {
4100 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004101 CameraMetadata settings;
4102 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4103 if(i->settings) {
4104 settings = i->settings;
4105 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4106 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004107 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004108 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004109 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004110 } else {
4111 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004112 }
4113
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004114 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4115 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4116 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004117
4118 camera3_capture_result result;
4119 memset(&result, 0, sizeof(camera3_capture_result));
4120 result.frame_number = frame_number;
4121 result.result = i->settings;
4122 result.input_buffer = i->input_buffer;
4123 result.partial_result = PARTIAL_RESULT_COUNT;
4124
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004125 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004126 LOGD("Input request metadata and input buffer frame_number = %u",
4127 i->frame_number);
4128 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004129
4130 // Dispatch result metadata that may be just unblocked by this reprocess result.
4131 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004132 } else {
4133 LOGE("Could not find input request for frame number %d", frame_number);
4134 }
4135}
4136
4137/*===========================================================================
4138 * FUNCTION : handleBufferWithLock
4139 *
4140 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4141 *
4142 * PARAMETERS : @buffer: image buffer for the callback
4143 * @frame_number: frame number of the image buffer
4144 *
4145 * RETURN :
4146 *
4147 *==========================================================================*/
4148void QCamera3HardwareInterface::handleBufferWithLock(
4149 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4150{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004151 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004152
4153 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4154 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4155 }
4156
Thierry Strudel3d639192016-09-09 11:52:26 -07004157 /* Nothing to be done during error state */
4158 if ((ERROR == mState) || (DEINIT == mState)) {
4159 return;
4160 }
4161 if (mFlushPerf) {
4162 handleBuffersDuringFlushLock(buffer);
4163 return;
4164 }
4165 //not in flush
4166 // If the frame number doesn't exist in the pending request list,
4167 // directly send the buffer to the frameworks, and update pending buffers map
4168 // Otherwise, book-keep the buffer.
4169 pendingRequestIterator i = mPendingRequestsList.begin();
4170 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4171 i++;
4172 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004175 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004176 // For a reprocessing request, try to send out result metadata.
4177 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004179 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004180
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004181 // Check if this frame was dropped.
4182 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4183 m != mPendingFrameDropList.end(); m++) {
4184 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4185 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4186 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4187 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4188 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4189 frame_number, streamID);
4190 m = mPendingFrameDropList.erase(m);
4191 break;
4192 }
4193 }
4194
Binhao Lin09245482017-08-31 18:25:29 -07004195 // WAR for encoder avtimer timestamp issue
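// If the AV timer is enabled for this video stream, stamp the recorded
// av_timestamp into the buffer's private handle (SET_VT_TIMESTAMP) so VT
// clients see the encoder timestamp; if no av_timestamp was recorded for the
// request, flag the buffer with an error status.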
4196 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4197 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4198 m_bAVTimerEnabled) {
4199 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4200 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4201 if (req->frame_number != frame_number)
4202 continue;
4203 if(req->av_timestamp == 0) {
4204 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4205 }
4206 else {
4207 struct private_handle_t *priv_handle =
4208 (struct private_handle_t *) (*(buffer->buffer));
4209 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4210 }
4211 }
4212 }
4213
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004214 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4215 LOGH("result frame_number = %d, buffer = %p",
4216 frame_number, buffer->buffer);
4217
4218 mPendingBuffersMap.removeBuf(buffer->buffer);
4219 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4220
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004221 if (mPreviewStarted == false) {
4222 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4223 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004224 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4225
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004226 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4227 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4228 mPreviewStarted = true;
4229
4230 // Set power hint for preview
4231 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4232 }
4233 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004234}
4235
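/*===========================================================================
 * FUNCTION   : removeUnrequestedMetadata
 *
 * DESCRIPTION: Filters the result metadata for a pending request: drops the
 *              lens shading map and face information when the request asked
 *              for them to be OFF, and restores the originally requested
 *              modes. The filtered metadata is stored back into
 *              requestIter->resultMetadata.
 *
 * PARAMETERS : @requestIter   : Iterator to the pending request to update
 *              @resultMetadata: Result metadata to filter; ownership is taken
 *                               over by this function
 *
 * RETURN     : None
 *
 *==========================================================================*/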
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07004236void QCamera3HardwareInterface::removeUnrequestedMetadata(pendingRequestIterator requestIter,
4237 camera_metadata_t *resultMetadata) {
4238 CameraMetadata metadata;
4239 metadata.acquire(resultMetadata);
4240
4241 // Remove the lens shading map if it's not requested.
4242 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF &&
4243 metadata.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE) &&
4244 metadata.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0] !=
4245 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4246 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4247 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4248 &requestIter->requestedLensShadingMapMode, 1);
4249 }
4250
4251 // Remove face information if it's not requested.
4252 if (requestIter->requestedFaceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_OFF &&
4253 metadata.exists(ANDROID_STATISTICS_FACE_DETECT_MODE) &&
4254 metadata.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0] !=
4255 ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
4256 metadata.erase(ANDROID_STATISTICS_FACE_RECTANGLES);
4257 metadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE,
4258 &requestIter->requestedFaceDetectMode, 1);
4259 }
4260
4261 requestIter->resultMetadata = metadata.release();
4262}
4263
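/*===========================================================================
 * FUNCTION   : handlePendingResultMetadataWithLock
 *
 * DESCRIPTION: Attaches the result metadata to its pending request, fixes up
 *              the partial result count for HDR+/reprocess/live requests,
 *              forwards live-request metadata to the HDR+ client when HDR+
 *              mode is enabled, strips unrequested metadata, and then tries
 *              to dispatch results in frame-number order. Called with mMutex
 *              held.
 *
 * PARAMETERS : @frameNumber   : Frame number of the result metadata
 *              @resultMetadata: Result metadata to be sent to the framework
 *
 * RETURN     : None
 *
 *==========================================================================*/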
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004264void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004265 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004266{
4267 // Find the pending request for this result metadata.
4268 auto requestIter = mPendingRequestsList.begin();
4269 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4270 requestIter++;
4271 }
4272
4273 if (requestIter == mPendingRequestsList.end()) {
4274 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4275 return;
4276 }
4277
4278 // Update the result metadata
4279 requestIter->resultMetadata = resultMetadata;
4280
4281 // Check what type of request this is.
4282 bool liveRequest = false;
4283 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004284 // HDR+ request doesn't have partial results.
4285 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004286 } else if (requestIter->input_buffer != nullptr) {
4287 // Reprocessing request result is the same as settings.
4288 requestIter->resultMetadata = requestIter->settings;
4289 // Reprocessing request doesn't have partial results.
4290 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4291 } else {
4292 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004293 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004294 mPendingLiveRequest--;
4295
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004296 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004297 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004298 // For a live request, send the metadata to HDR+ client.
4299 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4300 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4301 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4302 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004303 }
4304 }
4305
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07004306 if (requestIter->input_buffer == nullptr) {
4307 removeUnrequestedMetadata(requestIter, resultMetadata);
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004308 }
4309
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004310 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4311}
4312
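/*===========================================================================
 * FUNCTION   : dispatchResultMetadataWithLock
 *
 * DESCRIPTION: Walks mPendingRequestsList in frame-number order and sends out
 *              every result whose metadata is ready, notifying ERROR_RESULT
 *              for earlier live requests whose metadata was missed or whose
 *              partial results were dropped. Called with mMutex held.
 *
 * PARAMETERS : @frameNumber  : Frame number of the metadata that just arrived
 *              @isLiveRequest: True if that metadata belongs to a live
 *                              request (neither HDR+ nor reprocess)
 *
 * RETURN     : None
 *
 *==========================================================================*/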
4313void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4314 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004315 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4316 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004317 bool readyToSend = true;
4318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004319 // Iterate through the pending requests to send out result metadata that are ready. Also if
4320 // this result metadata belongs to a live request, notify errors for previous live requests
4321 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004322 auto iter = mPendingRequestsList.begin();
4323 while (iter != mPendingRequestsList.end()) {
4324 // Check if current pending request is ready. If it's not ready, the following pending
4325 // requests are also not ready.
4326 if (readyToSend && iter->resultMetadata == nullptr) {
4327 readyToSend = false;
4328 }
4329
4330 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004331 bool errorResult = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004332
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004333 camera3_capture_result_t result = {};
4334 result.frame_number = iter->frame_number;
4335 result.result = iter->resultMetadata;
4336 result.partial_result = iter->partial_result_cnt;
4337
4338 // If this pending buffer has result metadata, we may be able to send out shutter callback
4339 // and result metadata.
4340 if (iter->resultMetadata != nullptr) {
4341 if (!readyToSend) {
4342 // If any of the previous pending request is not ready, this pending request is
4343 // also not ready to send in order to keep shutter callbacks and result metadata
4344 // in order.
4345 iter++;
4346 continue;
4347 }
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004348 // Notify ERROR_RESULT if partial result was dropped.
4349 errorResult = iter->partialResultDropped;
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004350 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004351 // If the result metadata belongs to a live request, notify errors for previous pending
4352 // live requests.
4353 mPendingLiveRequest--;
4354
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004355 LOGE("Error: HAL missed metadata for frame number %d", iter->frame_number);
4356 errorResult = true;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004357 } else {
4358 iter++;
4359 continue;
4360 }
4361
Shuzhen Wanga1d82a92017-09-19 14:39:43 -07004362 if (errorResult) {
4363 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
4364 } else {
4365 result.output_buffers = nullptr;
4366 result.num_output_buffers = 0;
4367 orchestrateResult(&result);
4368 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004369 // For reprocessing, result metadata is the same as settings so do not free it here to
4370 // avoid double free.
4371 if (result.result != iter->settings) {
4372 free_camera_metadata((camera_metadata_t *)result.result);
4373 }
4374 iter->resultMetadata = nullptr;
4375 iter = erasePendingRequest(iter);
4376 }
4377
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004378 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004379 for (auto &iter : mPendingRequestsList) {
4380 // Increment pipeline depth for the following pending requests.
4381 if (iter.frame_number > frameNumber) {
4382 iter.pipeline_depth++;
4383 }
4384 }
4385 }
4386
4387 unblockRequestIfNecessary();
4388}
4389
Thierry Strudel3d639192016-09-09 11:52:26 -07004390/*===========================================================================
4391 * FUNCTION : unblockRequestIfNecessary
4392 *
4393 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4394 * that mMutex is held when this function is called.
4395 *
4396 * PARAMETERS :
4397 *
4398 * RETURN :
4399 *
4400 *==========================================================================*/
4401void QCamera3HardwareInterface::unblockRequestIfNecessary()
4402{
4403 // Unblock process_capture_request
4404 pthread_cond_signal(&mRequestCond);
4405}
4406
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004407/*===========================================================================
4408 * FUNCTION : isHdrSnapshotRequest
4409 *
4410 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4411 *
4412 * PARAMETERS : camera3 request structure
4413 *
4414 * RETURN : boolean decision variable
4415 *
4416 *==========================================================================*/
4417bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4418{
4419 if (request == NULL) {
4420 LOGE("Invalid request handle");
4421 assert(0);
4422 return false;
4423 }
4424
4425 if (!mForceHdrSnapshot) {
4426 CameraMetadata frame_settings;
4427 frame_settings = request->settings;
4428
4429 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4430 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4431 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4432 return false;
4433 }
4434 } else {
4435 return false;
4436 }
4437
4438 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4439 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4440 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4441 return false;
4442 }
4443 } else {
4444 return false;
4445 }
4446 }
4447
4448 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4449 if (request->output_buffers[i].stream->format
4450 == HAL_PIXEL_FORMAT_BLOB) {
4451 return true;
4452 }
4453 }
4454
4455 return false;
4456}
4457/*===========================================================================
4458 * FUNCTION : orchestrateRequest
4459 *
4460 * DESCRIPTION: Orchestrates a capture request from camera service
4461 *
4462 * PARAMETERS :
4463 * @request : request from framework to process
4464 *
4465 * RETURN : Error status codes
4466 *
4467 *==========================================================================*/
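// Rough outline of the HDR snapshot path below (summarizing the existing
// code): the blob stream is re-requested internally with AE locked while
// exposure compensation is stepped through the -2x, 0x and 2x EV brackets,
// interleaving metering-only settling captures with captures that need
// metadata; the original framework frame number is attached to the capture
// issued with the original output buffers.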
4468int32_t QCamera3HardwareInterface::orchestrateRequest(
4469 camera3_capture_request_t *request)
4470{
4471
4472 uint32_t originalFrameNumber = request->frame_number;
4473 uint32_t originalOutputCount = request->num_output_buffers;
4474 const camera_metadata_t *original_settings = request->settings;
4475 List<InternalRequest> internallyRequestedStreams;
4476 List<InternalRequest> emptyInternalList;
4477
4478 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4479 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4480 uint32_t internalFrameNumber;
4481 CameraMetadata modified_meta;
4482
4483
4484 /* Add Blob channel to list of internally requested streams */
4485 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4486 if (request->output_buffers[i].stream->format
4487 == HAL_PIXEL_FORMAT_BLOB) {
4488 InternalRequest streamRequested;
4489 streamRequested.meteringOnly = 1;
4490 streamRequested.need_metadata = 0;
4491 streamRequested.stream = request->output_buffers[i].stream;
4492 internallyRequestedStreams.push_back(streamRequested);
4493 }
4494 }
4495 request->num_output_buffers = 0;
4496 auto itr = internallyRequestedStreams.begin();
4497
4498 /* Modify setting to set compensation */
4499 modified_meta = request->settings;
4500 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4501 uint8_t aeLock = 1;
4502 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4503 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4504 camera_metadata_t *modified_settings = modified_meta.release();
4505 request->settings = modified_settings;
4506
4507 /* Capture Settling & -2x frame */
4508 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4509 request->frame_number = internalFrameNumber;
4510 processCaptureRequest(request, internallyRequestedStreams);
4511
4512 request->num_output_buffers = originalOutputCount;
4513 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4514 request->frame_number = internalFrameNumber;
4515 processCaptureRequest(request, emptyInternalList);
4516 request->num_output_buffers = 0;
4517
4518 modified_meta = modified_settings;
4519 expCompensation = 0;
4520 aeLock = 1;
4521 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4522 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4523 modified_settings = modified_meta.release();
4524 request->settings = modified_settings;
4525
4526 /* Capture Settling & 0X frame */
4527
4528 itr = internallyRequestedStreams.begin();
4529 if (itr == internallyRequestedStreams.end()) {
4530 LOGE("Error Internally Requested Stream list is empty");
4531 assert(0);
4532 } else {
4533 itr->need_metadata = 0;
4534 itr->meteringOnly = 1;
4535 }
4536
4537 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4538 request->frame_number = internalFrameNumber;
4539 processCaptureRequest(request, internallyRequestedStreams);
4540
4541 itr = internallyRequestedStreams.begin();
4542 if (itr == internallyRequestedStreams.end()) {
4543 ALOGE("Error Internally Requested Stream list is empty");
4544 assert(0);
4545 } else {
4546 itr->need_metadata = 1;
4547 itr->meteringOnly = 0;
4548 }
4549
4550 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4551 request->frame_number = internalFrameNumber;
4552 processCaptureRequest(request, internallyRequestedStreams);
4553
4554 /* Capture 2X frame*/
4555 modified_meta = modified_settings;
4556 expCompensation = GB_HDR_2X_STEP_EV;
4557 aeLock = 1;
4558 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4559 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4560 modified_settings = modified_meta.release();
4561 request->settings = modified_settings;
4562
4563 itr = internallyRequestedStreams.begin();
4564 if (itr == internallyRequestedStreams.end()) {
4565 ALOGE("Error Internally Requested Stream list is empty");
4566 assert(0);
4567 } else {
4568 itr->need_metadata = 0;
4569 itr->meteringOnly = 1;
4570 }
4571 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4572 request->frame_number = internalFrameNumber;
4573 processCaptureRequest(request, internallyRequestedStreams);
4574
4575 itr = internallyRequestedStreams.begin();
4576 if (itr == internallyRequestedStreams.end()) {
4577 ALOGE("Error Internally Requested Stream list is empty");
4578 assert(0);
4579 } else {
4580 itr->need_metadata = 1;
4581 itr->meteringOnly = 0;
4582 }
4583
4584 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4585 request->frame_number = internalFrameNumber;
4586 processCaptureRequest(request, internallyRequestedStreams);
4587
4588
4589 /* Capture 2X on original streaming config*/
4590 internallyRequestedStreams.clear();
4591
4592 /* Restore original settings pointer */
4593 request->settings = original_settings;
4594 } else {
4595 uint32_t internalFrameNumber;
4596 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4597 request->frame_number = internalFrameNumber;
4598 return processCaptureRequest(request, internallyRequestedStreams);
4599 }
4600
4601 return NO_ERROR;
4602}
4603
4604/*===========================================================================
4605 * FUNCTION : orchestrateResult
4606 *
4607 * DESCRIPTION: Orchestrates a capture result to camera service
4608 *
4609 * PARAMETERS :
4610 * @result : capture result to be sent to the framework
4611 *
4612 * RETURN :
4613 *
4614 *==========================================================================*/
4615void QCamera3HardwareInterface::orchestrateResult(
4616 camera3_capture_result_t *result)
4617{
4618 uint32_t frameworkFrameNumber;
4619 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4620 frameworkFrameNumber);
4621 if (rc != NO_ERROR) {
4622 LOGE("Cannot find translated frameworkFrameNumber");
4623 assert(0);
4624 } else {
4625 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004626 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004627 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004628 if (result->result != NULL) {
Binhao Lin299ffc92017-04-27 11:22:47 -07004629 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4630 camera_metadata_entry_t entry;
4631 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4632 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004633 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004634 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4635 if (ret != OK)
4636 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004637 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004638 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004639 result->frame_number = frameworkFrameNumber;
4640 mCallbackOps->process_capture_result(mCallbackOps, result);
4641 }
4642 }
4643}
4644
4645/*===========================================================================
4646 * FUNCTION : orchestrateNotify
4647 *
4648 * DESCRIPTION: Orchestrates a notify to camera service
4649 *
4650 * PARAMETERS :
4651 * @notify_msg : notify message to be sent to the framework
4652 *
4653 * RETURN :
4654 *
4655 *==========================================================================*/
4656void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4657{
4658 uint32_t frameworkFrameNumber;
4659 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004660 int32_t rc = NO_ERROR;
4661
4662 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004663 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004664
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004665 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004666 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4667 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4668 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004669 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004670 LOGE("Cannot find translated frameworkFrameNumber");
4671 assert(0);
4672 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004673 }
4674 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004675
4676 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4677 LOGD("Internal Request drop the notifyCb");
4678 } else {
4679 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4680 mCallbackOps->notify(mCallbackOps, notify_msg);
4681 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004682}
4683
4684/*===========================================================================
4685 * FUNCTION : FrameNumberRegistry
4686 *
4687 * DESCRIPTION: Constructor
4688 *
4689 * PARAMETERS :
4690 *
4691 * RETURN :
4692 *
4693 *==========================================================================*/
4694FrameNumberRegistry::FrameNumberRegistry()
4695{
4696 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4697}
4698
4699/*===========================================================================
4700 * FUNCTION : ~FrameNumberRegistry
4701 *
4702 * DESCRIPTION: Destructor
4703 *
4704 * PARAMETERS :
4705 *
4706 * RETURN :
4707 *
4708 *==========================================================================*/
4709FrameNumberRegistry::~FrameNumberRegistry()
4710{
4711}
4712
4713/*===========================================================================
4714 * FUNCTION : purgeOldEntriesLocked
4715 *
4716 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4717 *
4718 * PARAMETERS :
4719 *
4720 * RETURN : NONE
4721 *
4722 *==========================================================================*/
4723void FrameNumberRegistry::purgeOldEntriesLocked()
4724{
4725 while (_register.begin() != _register.end()) {
4726 auto itr = _register.begin();
4727 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4728 _register.erase(itr);
4729 } else {
4730 return;
4731 }
4732 }
4733}
4734
4735/*===========================================================================
4736 * FUNCTION : allocStoreInternalFrameNumber
4737 *
4738 * DESCRIPTION: Method to note down a framework request and associate a new
4739 * internal request number against it
4740 *
4741 * PARAMETERS :
4742 * @fFrameNumber: Identifier given by framework
4743 * @internalFN : Output parameter which will have the newly generated internal
4744 * entry
4745 *
4746 * RETURN : Error code
4747 *
4748 *==========================================================================*/
4749int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4750 uint32_t &internalFrameNumber)
4751{
4752 Mutex::Autolock lock(mRegistryLock);
4753 internalFrameNumber = _nextFreeInternalNumber++;
4754 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4755 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4756 purgeOldEntriesLocked();
4757 return NO_ERROR;
4758}
4759
4760/*===========================================================================
4761 * FUNCTION : generateStoreInternalFrameNumber
4762 *
4763 * DESCRIPTION: Method to generate and store a new internal request number
4764 * independent of any association with framework requests
4765 *
4766 * PARAMETERS :
4767 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4768 *
4769 *
4770 * RETURN : Error code
4771 *
4772 *==========================================================================*/
4773int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4774{
4775 Mutex::Autolock lock(mRegistryLock);
4776 internalFrameNumber = _nextFreeInternalNumber++;
4777 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4778 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4779 purgeOldEntriesLocked();
4780 return NO_ERROR;
4781}
4782
4783/*===========================================================================
4784 * FUNCTION : getFrameworkFrameNumber
4785 *
4786 * DESCRIPTION: Method to query the framework frame number given an internal one
4787 *
4788 * PARAMETERS :
4789 * @internalFrame#: Internal reference
4790 * @frameworkframenumber: Output parameter holding framework frame entry
4791 *
4792 * RETURN : Error code
4793 *
4794 *==========================================================================*/
4795int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4796 uint32_t &frameworkFrameNumber)
4797{
4798 Mutex::Autolock lock(mRegistryLock);
4799 auto itr = _register.find(internalFrameNumber);
4800 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004801 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004802 return -ENOENT;
4803 }
4804
4805 frameworkFrameNumber = itr->second;
4806 purgeOldEntriesLocked();
4807 return NO_ERROR;
4808}
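// Typical flow through FrameNumberRegistry, as used in this file:
// orchestrateRequest() calls allocStoreInternalFrameNumber() to pair a
// framework frame number with a fresh internal one, or
// generateStoreInternalFrameNumber() for purely internal captures (stored
// against EMPTY_FRAMEWORK_FRAME_NUMBER); orchestrateResult() and
// orchestrateNotify() later call getFrameworkFrameNumber() to translate back
// before delivering results and notifications, dropping anything that maps to
// an internal-only entry.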
Thierry Strudel3d639192016-09-09 11:52:26 -07004809
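/*===========================================================================
 * FUNCTION   : fillPbStreamConfig
 *
 * DESCRIPTION: Fills an HDR+ (pbcamera) stream configuration from a channel
 *              stream's info: dimensions, pixel format, per-plane stride and
 *              scanline, and the padding implied by the frame length.
 *
 * PARAMETERS : @config     : Stream configuration to fill
 *              @pbStreamId : HDR+ stream id to assign
 *              @channel    : Channel owning the stream
 *              @streamIndex: Index of the stream within the channel
 *
 * RETURN     : OK on success, error code otherwise
 *
 *==========================================================================*/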
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004810status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004811 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4812 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004813 if (config == nullptr) {
4814 LOGE("%s: config is null", __FUNCTION__);
4815 return BAD_VALUE;
4816 }
4817
4818 if (channel == nullptr) {
4819 LOGE("%s: channel is null", __FUNCTION__);
4820 return BAD_VALUE;
4821 }
4822
4823 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4824 if (stream == nullptr) {
4825 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4826 return NAME_NOT_FOUND;
4827 }
4828
4829 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4830 if (streamInfo == nullptr) {
4831 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4832 return NAME_NOT_FOUND;
4833 }
4834
4835 config->id = pbStreamId;
4836 config->image.width = streamInfo->dim.width;
4837 config->image.height = streamInfo->dim.height;
4838 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004839
4840 int bytesPerPixel = 0;
4841
4842 switch (streamInfo->fmt) {
4843 case CAM_FORMAT_YUV_420_NV21:
4844 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4845 bytesPerPixel = 1;
4846 break;
4847 case CAM_FORMAT_YUV_420_NV12:
4848 case CAM_FORMAT_YUV_420_NV12_VENUS:
4849 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4850 bytesPerPixel = 1;
4851 break;
4852 default:
4853 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4854 return BAD_VALUE;
4855 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004856
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004857 uint32_t totalPlaneSize = 0;
4858
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004859 // Fill plane information.
4860 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4861 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004862 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004863 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4864 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004865
4866 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004867 }
4868
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004869 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004870 return OK;
4871}
4872
Thierry Strudel3d639192016-09-09 11:52:26 -07004873/*===========================================================================
4874 * FUNCTION : processCaptureRequest
4875 *
4876 * DESCRIPTION: process a capture request from camera service
4877 *
4878 * PARAMETERS :
4879 * @request : request from framework to process
4880 *
4881 * RETURN :
4882 *
4883 *==========================================================================*/
4884int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004885 camera3_capture_request_t *request,
4886 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004887{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004888 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004889 int rc = NO_ERROR;
4890 int32_t request_id;
4891 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004892 bool isVidBufRequested = false;
4893 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004894 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004895
4896 pthread_mutex_lock(&mMutex);
4897
4898 // Validate current state
4899 switch (mState) {
4900 case CONFIGURED:
4901 case STARTED:
4902 /* valid state */
4903 break;
4904
4905 case ERROR:
4906 pthread_mutex_unlock(&mMutex);
4907 handleCameraDeviceError();
4908 return -ENODEV;
4909
4910 default:
4911 LOGE("Invalid state %d", mState);
4912 pthread_mutex_unlock(&mMutex);
4913 return -ENODEV;
4914 }
4915
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004916 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004917 if (rc != NO_ERROR) {
4918 LOGE("incoming request is not valid");
4919 pthread_mutex_unlock(&mMutex);
4920 return rc;
4921 }
4922
4923 meta = request->settings;
4924
4925 // For first capture request, send capture intent, and
4926 // stream on all streams
4927 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004928 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 // send an unconfigure to the backend so that the isp
4930 // resources are deallocated
4931 if (!mFirstConfiguration) {
4932 cam_stream_size_info_t stream_config_info;
4933 int32_t hal_version = CAM_HAL_V3;
4934 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4935 stream_config_info.buffer_info.min_buffers =
4936 MIN_INFLIGHT_REQUESTS;
4937 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004938 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004939 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004940 clear_metadata_buffer(mParameters);
4941 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4942 CAM_INTF_PARM_HAL_VERSION, hal_version);
4943 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4944 CAM_INTF_META_STREAM_INFO, stream_config_info);
4945 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4946 mParameters);
4947 if (rc < 0) {
4948 LOGE("set_parms for unconfigure failed");
4949 pthread_mutex_unlock(&mMutex);
4950 return rc;
4951 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004952
Thierry Strudel3d639192016-09-09 11:52:26 -07004953 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004954 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004955 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004956 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004957 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004958 property_get("persist.camera.is_type", is_type_value, "4");
4959 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4960 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4961 property_get("persist.camera.is_type_preview", is_type_value, "4");
4962 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4963 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004964
4965 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4966 int32_t hal_version = CAM_HAL_V3;
4967 uint8_t captureIntent =
4968 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4969 mCaptureIntent = captureIntent;
4970 clear_metadata_buffer(mParameters);
4971 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4972 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4973 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004974 if (mFirstConfiguration) {
4975 // configure instant AEC
4976 // Instant AEC is a session based parameter and it is needed only
4977 // once per complete session after open camera.
4978 // i.e. This is set only once for the first capture request, after open camera.
4979 setInstantAEC(meta);
4980 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004981 uint8_t fwkVideoStabMode=0;
4982 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4983 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4984 }
4985
Xue Tuecac74e2017-04-17 13:58:15 -07004986 // If the EIS setprop is enabled, turn EIS on only for video/preview streams
4987 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004988 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004989 int32_t vsMode;
4990 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4991 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4992 rc = BAD_VALUE;
4993 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004994 LOGD("setEis %d", setEis);
4995 bool eis3Supported = false;
4996 size_t count = IS_TYPE_MAX;
4997 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4998 for (size_t i = 0; i < count; i++) {
4999 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
5000 eis3Supported = true;
5001 break;
5002 }
5003 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005004
5005 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005006 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07005007 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5008 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005009 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
5010 is_type = isTypePreview;
5011 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
5012 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
5013 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07005014 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005015 } else {
5016 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07005017 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005018 } else {
5019 is_type = IS_TYPE_NONE;
5020 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005021 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005022 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005023 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
5024 }
5025 }
5026
5027 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5028 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5029
Thierry Strudel54dc9782017-02-15 12:12:10 -08005030 //Disable tintless only if the property is set to 0
5031 memset(prop, 0, sizeof(prop));
5032 property_get("persist.camera.tintless.enable", prop, "1");
5033 int32_t tintless_value = atoi(prop);
5034
Thierry Strudel3d639192016-09-09 11:52:26 -07005035 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5036 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005037
Thierry Strudel3d639192016-09-09 11:52:26 -07005038 //Disable CDS for HFR mode or if DIS/EIS is on.
5039 //CDS is a session parameter in the backend/ISP, so need to be set/reset
5040 //after every configure_stream
5041 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5042 (m_bIsVideo)) {
5043 int32_t cds = CAM_CDS_MODE_OFF;
5044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5045 CAM_INTF_PARM_CDS_MODE, cds))
5046 LOGE("Failed to disable CDS for HFR mode");
5047
5048 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005049
5050 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5051 uint8_t* use_av_timer = NULL;
5052
5053 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005054 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005055 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005056 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005057 }
5058 else{
5059 use_av_timer =
5060 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005061 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005062 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005063 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5064 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005065 }
5066
5067 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5068 rc = BAD_VALUE;
5069 }
5070 }
5071
Thierry Strudel3d639192016-09-09 11:52:26 -07005072 setMobicat();
5073
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005074 uint8_t nrMode = 0;
5075 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5076 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5077 }
5078
Thierry Strudel3d639192016-09-09 11:52:26 -07005079 /* Set fps and hfr mode while sending meta stream info so that sensor
5080 * can configure appropriate streaming mode */
5081 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005082 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5083 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005084 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5085 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005086 if (rc == NO_ERROR) {
5087 int32_t max_fps =
5088 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005089 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005090 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5091 }
5092 /* For HFR, more buffers are dequeued upfront to improve the performance */
5093 if (mBatchSize) {
5094 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5095 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5096 }
5097 }
5098 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005099 LOGE("setHalFpsRange failed");
5100 }
5101 }
5102 if (meta.exists(ANDROID_CONTROL_MODE)) {
5103 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5104 rc = extractSceneMode(meta, metaMode, mParameters);
5105 if (rc != NO_ERROR) {
5106 LOGE("extractSceneMode failed");
5107 }
5108 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005109 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005110
Thierry Strudel04e026f2016-10-10 11:27:36 -07005111 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5112 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5113 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5114 rc = setVideoHdrMode(mParameters, vhdr);
5115 if (rc != NO_ERROR) {
5116 LOGE("setVideoHdrMode failed");
5117 }
5118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005120 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005121 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005122 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005123 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5124 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5125 sensorModeFullFov)) {
5126 rc = BAD_VALUE;
5127 }
5128 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005129 //TODO: validate the arguments, HSV scenemode should have only the
5130 //advertised fps ranges
5131
5132 /*set the capture intent, hal version, tintless, stream info,
5133 *and enable/disable parameters to the backend*/
5134 LOGD("set_parms META_STREAM_INFO " );
5135 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005136 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5137 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005138 mStreamConfigInfo.type[i],
5139 mStreamConfigInfo.stream_sizes[i].width,
5140 mStreamConfigInfo.stream_sizes[i].height,
5141 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005142 mStreamConfigInfo.format[i],
5143 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005144 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005145
Thierry Strudel3d639192016-09-09 11:52:26 -07005146 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5147 mParameters);
5148 if (rc < 0) {
5149 LOGE("set_parms failed for hal version, stream info");
5150 }
5151
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005152 cam_sensor_mode_info_t sensorModeInfo = {};
5153 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005154 if (rc != NO_ERROR) {
5155 LOGE("Failed to get sensor output size");
5156 pthread_mutex_unlock(&mMutex);
5157 goto error_exit;
5158 }
5159
5160 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5161 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005162 sensorModeInfo.active_array_size.width,
5163 sensorModeInfo.active_array_size.height);
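// Illustrative note: QCamera3CropRegionMapper presumably rescales framework
// crop/ROI coordinates from full active-array space into the selected sensor
// mode's space, roughly x' = x * sensor_width / active_width (and similarly
// for y and the dimensions); see the mapper implementation for the exact
// rounding and clamping behavior.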
Thierry Strudel3d639192016-09-09 11:52:26 -07005164
5165 /* Set batchmode before initializing channel. Since registerBuffer
5166 * internally initializes some of the channels, better set batchmode
5167 * even before first register buffer */
5168 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5169 it != mStreamInfo.end(); it++) {
5170 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5171 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5172 && mBatchSize) {
5173 rc = channel->setBatchSize(mBatchSize);
5174 //Disable per frame map unmap for HFR/batchmode case
5175 rc |= channel->setPerFrameMapUnmap(false);
5176 if (NO_ERROR != rc) {
5177 LOGE("Channel init failed %d", rc);
5178 pthread_mutex_unlock(&mMutex);
5179 goto error_exit;
5180 }
5181 }
5182 }
5183
5184 //First initialize all streams
5185 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5186 it != mStreamInfo.end(); it++) {
5187 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005188
5189 /* Initial value of NR mode is needed before stream on */
5190 channel->setNRMode(nrMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07005191 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5192 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005193 setEis) {
5194 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5195 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5196 is_type = mStreamConfigInfo.is_type[i];
5197 break;
5198 }
5199 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005200 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005201 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005202 rc = channel->initialize(IS_TYPE_NONE);
5203 }
5204 if (NO_ERROR != rc) {
5205 LOGE("Channel initialization failed %d", rc);
5206 pthread_mutex_unlock(&mMutex);
5207 goto error_exit;
5208 }
5209 }
5210
5211 if (mRawDumpChannel) {
5212 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5213 if (rc != NO_ERROR) {
5214 LOGE("Error: Raw Dump Channel init failed");
5215 pthread_mutex_unlock(&mMutex);
5216 goto error_exit;
5217 }
5218 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005219 if (mHdrPlusRawSrcChannel) {
5220 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5221 if (rc != NO_ERROR) {
5222 LOGE("Error: HDR+ RAW Source Channel init failed");
5223 pthread_mutex_unlock(&mMutex);
5224 goto error_exit;
5225 }
5226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005227 if (mSupportChannel) {
5228 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5229 if (rc < 0) {
5230 LOGE("Support channel initialization failed");
5231 pthread_mutex_unlock(&mMutex);
5232 goto error_exit;
5233 }
5234 }
5235 if (mAnalysisChannel) {
5236 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5237 if (rc < 0) {
5238 LOGE("Analysis channel initialization failed");
5239 pthread_mutex_unlock(&mMutex);
5240 goto error_exit;
5241 }
5242 }
5243 if (mDummyBatchChannel) {
5244 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5245 if (rc < 0) {
5246 LOGE("mDummyBatchChannel setBatchSize failed");
5247 pthread_mutex_unlock(&mMutex);
5248 goto error_exit;
5249 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005250 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005251 if (rc < 0) {
5252 LOGE("mDummyBatchChannel initialization failed");
5253 pthread_mutex_unlock(&mMutex);
5254 goto error_exit;
5255 }
5256 }
5257
5258 // Set bundle info
5259 rc = setBundleInfo();
5260 if (rc < 0) {
5261 LOGE("setBundleInfo failed %d", rc);
5262 pthread_mutex_unlock(&mMutex);
5263 goto error_exit;
5264 }
5265
5266 //update settings from app here
5267 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5268 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5269 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5270 }
5271 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5272 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5273 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5274 }
5275 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5276 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5277 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5278
5279 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5280 (mLinkedCameraId != mCameraId) ) {
5281 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5282 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005283 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 goto error_exit;
5285 }
5286 }
5287
5288 // add bundle related cameras
5289 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5290 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005291 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5292 &m_pDualCamCmdPtr->bundle_info;
5293 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 if (mIsDeviceLinked)
5295 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5296 else
5297 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5298
5299 pthread_mutex_lock(&gCamLock);
5300
5301 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5302 LOGE("Dualcam: Invalid Session Id ");
5303 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005304 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005305 goto error_exit;
5306 }
5307
5308 if (mIsMainCamera == 1) {
5309 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5310 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005311 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005312 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005313 // related session id should be session id of linked session
5314 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5315 } else {
5316 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5317 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005318 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005319 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005320 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5321 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005322 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005323 pthread_mutex_unlock(&gCamLock);
5324
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005325 rc = mCameraHandle->ops->set_dual_cam_cmd(
5326 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005327 if (rc < 0) {
5328 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005329 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005330 goto error_exit;
5331 }
5332 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005333 goto no_error;
5334error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005335 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005336 return rc;
5337no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005338 mWokenUpByDaemon = false;
5339 mPendingLiveRequest = 0;
5340 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005341 }
5342
5343 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005344 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005345
5346 if (mFlushPerf) {
5347 //we cannot accept any requests during flush
5348 LOGE("process_capture_request cannot proceed during flush");
5349 pthread_mutex_unlock(&mMutex);
5350 return NO_ERROR; //should return an error
5351 }
5352
5353 if (meta.exists(ANDROID_REQUEST_ID)) {
5354 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5355 mCurrentRequestId = request_id;
5356 LOGD("Received request with id: %d", request_id);
5357 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5358 LOGE("Unable to find request id field, \
5359 & no previous id available");
5360 pthread_mutex_unlock(&mMutex);
5361 return NAME_NOT_FOUND;
5362 } else {
5363 LOGD("Re-using old request id");
5364 request_id = mCurrentRequestId;
5365 }
5366
5367 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5368 request->num_output_buffers,
5369 request->input_buffer,
5370 frameNumber);
5371 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005372 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005373 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005374 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005375 uint32_t snapshotStreamId = 0;
5376 for (size_t i = 0; i < request->num_output_buffers; i++) {
5377 const camera3_stream_buffer_t& output = request->output_buffers[i];
5378 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5379
Emilian Peev7650c122017-01-19 08:24:33 -08005380 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5381 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005382 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005383 blob_request = 1;
5384 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5385 }
5386
5387 if (output.acquire_fence != -1) {
5388 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5389 close(output.acquire_fence);
5390 if (rc != OK) {
5391 LOGE("sync wait failed %d", rc);
5392 pthread_mutex_unlock(&mMutex);
5393 return rc;
5394 }
5395 }
5396
Emilian Peev0f3c3162017-03-15 12:57:46 +00005397 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5398 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005399 depthRequestPresent = true;
5400 continue;
5401 }
5402
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005403 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005405
5406 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5407 isVidBufRequested = true;
5408 }
5409 }
5410
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005411 //FIXME: Add checks to ensure no dups in validateCaptureRequest
5412 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5413 itr++) {
5414 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5415 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5416 channel->getStreamID(channel->getStreamTypeMask());
5417
5418 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5419 isVidBufRequested = true;
5420 }
5421 }
5422
Thierry Strudel3d639192016-09-09 11:52:26 -07005423 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005424 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005425 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005426 }
5427 if (blob_request && mRawDumpChannel) {
5428 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005429 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005430 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005431 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005432 }
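// CAM_FREERUN_IDX above is presumably a wildcard buffer index, i.e. the
// backend may pick any free buffer for the RAW dump stream instead of a
// specific index supplied by the HAL.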
5433
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005434 {
5435 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5436 // Request a RAW buffer if
5437 // 1. mHdrPlusRawSrcChannel is valid.
5438 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5439 // 3. There is no pending HDR+ request.
5440 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5441 mHdrPlusPendingRequests.size() == 0) {
5442 streamsArray.stream_request[streamsArray.num_streams].streamID =
5443 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5444 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5445 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005446 }
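// Example of the gating above (the value of kHdrPlusRawPeriod is defined
// elsewhere): with a period of N, RAW buffers are requested only on frame
// numbers 0, N, 2N, ... and only while no HDR+ capture is pending, which
// bounds the RAW capture rate.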
5447
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005448 //extract capture intent
5449 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5450 mCaptureIntent =
5451 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5452 }
5453
5454 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5455 mCacMode =
5456 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5457 }
5458
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005459 uint8_t requestedLensShadingMapMode;
5460 // Get the shading map mode.
5461 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5462 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5463 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5464 } else {
5465 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5466 }
5467
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07005468 if (meta.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
5469 mLastRequestedFaceDetectMode =
5470 meta.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
5471 }
5472
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005473 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005474 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005475
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005476 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005477 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005478 // If this request has a still capture intent, try to submit an HDR+ request.
5479 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5480 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5481 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5482 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005483 }
5484
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005485 if (hdrPlusRequest) {
5486 // For a HDR+ request, just set the frame parameters.
5487 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5488 if (rc < 0) {
5489 LOGE("fail to set frame parameters");
5490 pthread_mutex_unlock(&mMutex);
5491 return rc;
5492 }
5493 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005494 /* Parse the settings:
5495 * - For every request in NORMAL MODE
5496 * - For every request in HFR mode during preview only case
5497 * - For first request of every batch in HFR mode during video
5498 * recording. In batchmode the same settings except frame number is
5499 * repeated in each request of the batch.
5500 */
5501 if (!mBatchSize ||
5502 (mBatchSize && !isVidBufRequested) ||
5503 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005504 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005505 if (rc < 0) {
5506 LOGE("fail to set frame parameters");
5507 pthread_mutex_unlock(&mMutex);
5508 return rc;
5509 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005510
5511 {
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07005512 // If HDR+ mode is enabled, override the following modes so the necessary metadata
5513 // will be included in the result metadata sent to Easel HDR+.
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005514 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5515 if (mHdrPlusModeEnabled) {
5516 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5517 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07005518 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STATS_FACEDETECT_MODE,
5519 ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005520 }
5521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005522 }
5523 /* For batchMode HFR, setFrameParameters is not called for every
5524 * request; only the frame number of the latest request is parsed.
5525 * Keep track of the first and last frame numbers in a batch so that
5526 * metadata for all frame numbers of the batch can be duplicated in
5527 * handleBatchMetadata */
5528 if (mBatchSize) {
5529 if (!mToBeQueuedVidBufs) {
5530 //start of the batch
5531 mFirstFrameNumberInBatch = request->frame_number;
5532 }
5533 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5534 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5535 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005536 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005537 return BAD_VALUE;
5538 }
5539 }
5540 if (mNeedSensorRestart) {
5541 /* Unlock the mutex as restartSensor waits on the channels to be
5542 * stopped, which in turn calls stream callback functions -
5543 * handleBufferWithLock and handleMetadataWithLock */
5544 pthread_mutex_unlock(&mMutex);
5545 rc = dynamicUpdateMetaStreamInfo();
5546 if (rc != NO_ERROR) {
5547 LOGE("Restarting the sensor failed");
5548 return BAD_VALUE;
5549 }
5550 mNeedSensorRestart = false;
5551 pthread_mutex_lock(&mMutex);
5552 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005553 if(mResetInstantAEC) {
5554 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5555 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5556 mResetInstantAEC = false;
5557 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005558 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005559 if (request->input_buffer->acquire_fence != -1) {
5560 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5561 close(request->input_buffer->acquire_fence);
5562 if (rc != OK) {
5563 LOGE("input buffer sync wait failed %d", rc);
5564 pthread_mutex_unlock(&mMutex);
5565 return rc;
5566 }
5567 }
5568 }
5569
5570 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5571 mLastCustIntentFrmNum = frameNumber;
5572 }
5573 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005574 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005575 pendingRequestIterator latestRequest;
5576 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005577 pendingRequest.num_buffers = depthRequestPresent ?
5578 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005579 pendingRequest.request_id = request_id;
5580 pendingRequest.blob_request = blob_request;
5581 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005582 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Chien-Yu Chen21b9e9a2017-09-25 14:34:26 -07005583 pendingRequest.requestedFaceDetectMode = mLastRequestedFaceDetectMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005584 if (request->input_buffer) {
5585 pendingRequest.input_buffer =
5586 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5587 *(pendingRequest.input_buffer) = *(request->input_buffer);
5588 pInputBuffer = pendingRequest.input_buffer;
5589 } else {
5590 pendingRequest.input_buffer = NULL;
5591 pInputBuffer = NULL;
5592 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005593 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005594
5595 pendingRequest.pipeline_depth = 0;
5596 pendingRequest.partial_result_cnt = 0;
5597 extractJpegMetadata(mCurJpegMeta, request);
5598 pendingRequest.jpegMetadata = mCurJpegMeta;
5599 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005601 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005602 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005603 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5604 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005605
Samuel Ha68ba5172016-12-15 18:41:12 -08005606 /* DevCamDebug metadata processCaptureRequest */
5607 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5608 mDevCamDebugMetaEnable =
5609 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5610 }
5611 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5612 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005613
5614 //extract CAC info
5615 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5616 mCacMode =
5617 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5618 }
5619 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005620 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005621 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5622 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005623
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005624 // extract enableZsl info
5625 if (gExposeEnableZslKey) {
5626 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5627 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5628 mZslEnabled = pendingRequest.enableZsl;
5629 } else {
5630 pendingRequest.enableZsl = mZslEnabled;
5631 }
5632 }
5633
Thierry Strudel3d639192016-09-09 11:52:26 -07005634 PendingBuffersInRequest bufsForCurRequest;
5635 bufsForCurRequest.frame_number = frameNumber;
5636 // Mark current timestamp for the new request
5637 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005638 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005639 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005640
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005641 if (hdrPlusRequest) {
5642 // Save settings for this request.
5643 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5644 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5645
5646 // Add to pending HDR+ request queue.
5647 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5648 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5649
5650 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5651 }
5652
Thierry Strudel3d639192016-09-09 11:52:26 -07005653 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005654 if ((request->output_buffers[i].stream->data_space ==
5655 HAL_DATASPACE_DEPTH) &&
5656 (HAL_PIXEL_FORMAT_BLOB ==
5657 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005658 continue;
5659 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005660 RequestedBufferInfo requestedBuf;
5661 memset(&requestedBuf, 0, sizeof(requestedBuf));
5662 requestedBuf.stream = request->output_buffers[i].stream;
5663 requestedBuf.buffer = NULL;
5664 pendingRequest.buffers.push_back(requestedBuf);
5665
5666 // Add to buffer handle the pending buffers list
5667 PendingBufferInfo bufferInfo;
5668 bufferInfo.buffer = request->output_buffers[i].buffer;
5669 bufferInfo.stream = request->output_buffers[i].stream;
5670 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5671 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5672 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5673 frameNumber, bufferInfo.buffer,
5674 channel->getStreamTypeMask(), bufferInfo.stream->format);
5675 }
5676 // Add this request packet into mPendingBuffersMap
5677 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5678 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5679 mPendingBuffersMap.get_num_overall_buffers());
5680
5681 latestRequest = mPendingRequestsList.insert(
5682 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005683
5684 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5685 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005686 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005687 for (size_t i = 0; i < request->num_output_buffers; i++) {
5688 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5689 }
5690
Thierry Strudel3d639192016-09-09 11:52:26 -07005691 if(mFlush) {
5692 LOGI("mFlush is true");
5693 pthread_mutex_unlock(&mMutex);
5694 return NO_ERROR;
5695 }
5696
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005697 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5698 // channel.
5699 if (!hdrPlusRequest) {
5700 int indexUsed;
5701 // Notify metadata channel we receive a request
5702 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005703
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005704 if(request->input_buffer != NULL){
5705 LOGD("Input request, frame_number %d", frameNumber);
5706 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5707 if (NO_ERROR != rc) {
5708 LOGE("fail to set reproc parameters");
5709 pthread_mutex_unlock(&mMutex);
5710 return rc;
5711 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005712 }
5713
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005714 // Call request on other streams
5715 uint32_t streams_need_metadata = 0;
5716 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5717 for (size_t i = 0; i < request->num_output_buffers; i++) {
5718 const camera3_stream_buffer_t& output = request->output_buffers[i];
5719 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5720
5721 if (channel == NULL) {
5722 LOGW("invalid channel pointer for stream");
5723 continue;
5724 }
5725
5726 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5727 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5728 output.buffer, request->input_buffer, frameNumber);
5729 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005730 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005731 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5732 if (rc < 0) {
5733 LOGE("Fail to request on picture channel");
5734 pthread_mutex_unlock(&mMutex);
5735 return rc;
5736 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005737 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005738 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5739 assert(NULL != mDepthChannel);
5740 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005741
Emilian Peev7650c122017-01-19 08:24:33 -08005742 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5743 if (rc < 0) {
5744 LOGE("Fail to map on depth buffer");
5745 pthread_mutex_unlock(&mMutex);
5746 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005747 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005748 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005749 } else {
5750 LOGD("snapshot request with buffer %p, frame_number %d",
5751 output.buffer, frameNumber);
5752 if (!request->settings) {
5753 rc = channel->request(output.buffer, frameNumber,
5754 NULL, mPrevParameters, indexUsed);
5755 } else {
5756 rc = channel->request(output.buffer, frameNumber,
5757 NULL, mParameters, indexUsed);
5758 }
5759 if (rc < 0) {
5760 LOGE("Fail to request on picture channel");
5761 pthread_mutex_unlock(&mMutex);
5762 return rc;
5763 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005764
Emilian Peev7650c122017-01-19 08:24:33 -08005765 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5766 uint32_t j = 0;
5767 for (j = 0; j < streamsArray.num_streams; j++) {
5768 if (streamsArray.stream_request[j].streamID == streamId) {
5769 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5770 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5771 else
5772 streamsArray.stream_request[j].buf_index = indexUsed;
5773 break;
5774 }
5775 }
5776 if (j == streamsArray.num_streams) {
5777 LOGE("Did not find matching stream to update index");
5778 assert(0);
5779 }
5780
5781 pendingBufferIter->need_metadata = true;
5782 streams_need_metadata++;
5783 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005784 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005785 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5786 bool needMetadata = false;
5787 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5788 rc = yuvChannel->request(output.buffer, frameNumber,
5789 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5790 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005791 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005792 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005793 pthread_mutex_unlock(&mMutex);
5794 return rc;
5795 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005796
5797 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5798 uint32_t j = 0;
5799 for (j = 0; j < streamsArray.num_streams; j++) {
5800 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005801 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5802 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5803 else
5804 streamsArray.stream_request[j].buf_index = indexUsed;
5805 break;
5806 }
5807 }
5808 if (j == streamsArray.num_streams) {
5809 LOGE("Did not find matching stream to update index");
5810 assert(0);
5811 }
5812
5813 pendingBufferIter->need_metadata = needMetadata;
5814 if (needMetadata)
5815 streams_need_metadata += 1;
5816 LOGD("calling YUV channel request, need_metadata is %d",
5817 needMetadata);
5818 } else {
5819 LOGD("request with buffer %p, frame_number %d",
5820 output.buffer, frameNumber);
5821
5822 rc = channel->request(output.buffer, frameNumber, indexUsed);
5823
5824 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5825 uint32_t j = 0;
5826 for (j = 0; j < streamsArray.num_streams; j++) {
5827 if (streamsArray.stream_request[j].streamID == streamId) {
5828 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5829 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5830 else
5831 streamsArray.stream_request[j].buf_index = indexUsed;
5832 break;
5833 }
5834 }
5835 if (j == streamsArray.num_streams) {
5836 LOGE("Did not find matching stream to update index");
5837 assert(0);
5838 }
5839
5840 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5841 && mBatchSize) {
5842 mToBeQueuedVidBufs++;
5843 if (mToBeQueuedVidBufs == mBatchSize) {
5844 channel->queueBatchBuf();
5845 }
5846 }
5847 if (rc < 0) {
5848 LOGE("request failed");
5849 pthread_mutex_unlock(&mMutex);
5850 return rc;
5851 }
5852 }
5853 pendingBufferIter++;
5854 }
5855
5856 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5857 itr++) {
5858 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5859
5860 if (channel == NULL) {
5861 LOGE("invalid channel pointer for stream");
5862 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005863 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005864 return BAD_VALUE;
5865 }
5866
5867 InternalRequest requestedStream;
5868 requestedStream = (*itr);
5869
5870
5871 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5872 LOGD("snapshot request internally input buffer %p, frame_number %d",
5873 request->input_buffer, frameNumber);
5874 if(request->input_buffer != NULL){
5875 rc = channel->request(NULL, frameNumber,
5876 pInputBuffer, &mReprocMeta, indexUsed, true,
5877 requestedStream.meteringOnly);
5878 if (rc < 0) {
5879 LOGE("Fail to request on picture channel");
5880 pthread_mutex_unlock(&mMutex);
5881 return rc;
5882 }
5883 } else {
5884 LOGD("snapshot request with frame_number %d", frameNumber);
5885 if (!request->settings) {
5886 rc = channel->request(NULL, frameNumber,
5887 NULL, mPrevParameters, indexUsed, true,
5888 requestedStream.meteringOnly);
5889 } else {
5890 rc = channel->request(NULL, frameNumber,
5891 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5892 }
5893 if (rc < 0) {
5894 LOGE("Fail to request on picture channel");
5895 pthread_mutex_unlock(&mMutex);
5896 return rc;
5897 }
5898
5899 if ((*itr).meteringOnly != 1) {
5900 requestedStream.need_metadata = 1;
5901 streams_need_metadata++;
5902 }
5903 }
5904
5905 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5906 uint32_t j = 0;
5907 for (j = 0; j < streamsArray.num_streams; j++) {
5908 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005909 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5910 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5911 else
5912 streamsArray.stream_request[j].buf_index = indexUsed;
5913 break;
5914 }
5915 }
5916 if (j == streamsArray.num_streams) {
5917 LOGE("Did not find matching stream to update index");
5918 assert(0);
5919 }
5920
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005921 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005922 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005923 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005924 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005925 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005926 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005927 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005928 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005929
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005930 //If 2 streams have need_metadata set to true, fail the request, unless
5931 //we copy/reference count the metadata buffer
5932 if (streams_need_metadata > 1) {
5933 LOGE("not supporting request in which two streams require"
5934 " 2 HAL metadata for reprocessing");
5935 pthread_mutex_unlock(&mMutex);
5936 return -EINVAL;
5937 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005938
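// PD (phase-detection) data selection for the depth cloud, as implemented
// below: no depth channel -> CAM_PD_DATA_DISABLED; depth channel configured
// but no depth buffer in this request -> CAM_PD_DATA_SKIP; depth buffer
// requested -> follow NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE from the request
// settings when present, otherwise reuse the previously requested mode
// (mDepthCloudMode).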
Emilian Peev656e4fa2017-06-02 16:47:04 +01005939 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5940 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5941 if (depthRequestPresent && mDepthChannel) {
5942 if (request->settings) {
5943 camera_metadata_ro_entry entry;
5944 if (find_camera_metadata_ro_entry(request->settings,
5945 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5946 if (entry.data.u8[0]) {
5947 pdafEnable = CAM_PD_DATA_ENABLED;
5948 } else {
5949 pdafEnable = CAM_PD_DATA_SKIP;
5950 }
5951 mDepthCloudMode = pdafEnable;
5952 } else {
5953 pdafEnable = mDepthCloudMode;
5954 }
5955 } else {
5956 pdafEnable = mDepthCloudMode;
5957 }
5958 }
5959
Emilian Peev7650c122017-01-19 08:24:33 -08005960 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5961 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5962 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5963 pthread_mutex_unlock(&mMutex);
5964 return BAD_VALUE;
5965 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005966
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005967 if (request->input_buffer == NULL) {
5968 /* Set the parameters to backend:
5969 * - For every request in NORMAL MODE
5970 * - For every request in HFR mode during preview only case
5971 * - Once every batch in HFR mode during video recording
5972 */
5973 if (!mBatchSize ||
5974 (mBatchSize && !isVidBufRequested) ||
5975 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5976 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5977 mBatchSize, isVidBufRequested,
5978 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005979
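// In HFR batch mode the stream IDs requested by each frame of the batch are
// merged into mBatchedStreamsArray (skipping duplicates) and the combined
// list is sent to the backend once per batch; see the loops below.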
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005980 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5981 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5982 uint32_t m = 0;
5983 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5984 if (streamsArray.stream_request[k].streamID ==
5985 mBatchedStreamsArray.stream_request[m].streamID)
5986 break;
5987 }
5988 if (m == mBatchedStreamsArray.num_streams) {
5989 mBatchedStreamsArray.stream_request\
5990 [mBatchedStreamsArray.num_streams].streamID =
5991 streamsArray.stream_request[k].streamID;
5992 mBatchedStreamsArray.stream_request\
5993 [mBatchedStreamsArray.num_streams].buf_index =
5994 streamsArray.stream_request[k].buf_index;
5995 mBatchedStreamsArray.num_streams =
5996 mBatchedStreamsArray.num_streams + 1;
5997 }
5998 }
5999 streamsArray = mBatchedStreamsArray;
6000 }
6001 /* Update stream id of all the requested buffers */
6002 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
6003 streamsArray)) {
6004 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006005 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006006 return BAD_VALUE;
6007 }
6008
6009 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
6010 mParameters);
6011 if (rc < 0) {
6012 LOGE("set_parms failed");
6013 }
6014 /* reset to zero because the batch is queued */
6015 mToBeQueuedVidBufs = 0;
6016 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
6017 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
6018 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08006019 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
6020 uint32_t m = 0;
6021 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
6022 if (streamsArray.stream_request[k].streamID ==
6023 mBatchedStreamsArray.stream_request[m].streamID)
6024 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006025 }
6026 if (m == mBatchedStreamsArray.num_streams) {
6027 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6028 streamID = streamsArray.stream_request[k].streamID;
6029 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
6030 buf_index = streamsArray.stream_request[k].buf_index;
6031 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
6032 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08006033 }
6034 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006035 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006036
6037 // Start all streams after the first setting is sent, so that the
6038 // setting can be applied sooner: (0 + apply_delay)th frame.
6039 if (mState == CONFIGURED && mChannelHandle) {
6040 //Then start them.
6041 LOGH("Start META Channel");
6042 rc = mMetadataChannel->start();
6043 if (rc < 0) {
6044 LOGE("META channel start failed");
6045 pthread_mutex_unlock(&mMutex);
6046 return rc;
6047 }
6048
6049 if (mAnalysisChannel) {
6050 rc = mAnalysisChannel->start();
6051 if (rc < 0) {
6052 LOGE("Analysis channel start failed");
6053 mMetadataChannel->stop();
6054 pthread_mutex_unlock(&mMutex);
6055 return rc;
6056 }
6057 }
6058
6059 if (mSupportChannel) {
6060 rc = mSupportChannel->start();
6061 if (rc < 0) {
6062 LOGE("Support channel start failed");
6063 mMetadataChannel->stop();
6064 /* Although support and analysis are mutually exclusive today,
6065 adding it in any case for future proofing */
6066 if (mAnalysisChannel) {
6067 mAnalysisChannel->stop();
6068 }
6069 pthread_mutex_unlock(&mMutex);
6070 return rc;
6071 }
6072 }
6073 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6074 it != mStreamInfo.end(); it++) {
6075 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6076 LOGH("Start Processing Channel mask=%d",
6077 channel->getStreamTypeMask());
6078 rc = channel->start();
6079 if (rc < 0) {
6080 LOGE("channel start failed");
6081 pthread_mutex_unlock(&mMutex);
6082 return rc;
6083 }
6084 }
6085
6086 if (mRawDumpChannel) {
6087 LOGD("Starting raw dump stream");
6088 rc = mRawDumpChannel->start();
6089 if (rc != NO_ERROR) {
6090 LOGE("Error Starting Raw Dump Channel");
6091 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6092 it != mStreamInfo.end(); it++) {
6093 QCamera3Channel *channel =
6094 (QCamera3Channel *)(*it)->stream->priv;
6095 LOGH("Stopping Processing Channel mask=%d",
6096 channel->getStreamTypeMask());
6097 channel->stop();
6098 }
6099 if (mSupportChannel)
6100 mSupportChannel->stop();
6101 if (mAnalysisChannel) {
6102 mAnalysisChannel->stop();
6103 }
6104 mMetadataChannel->stop();
6105 pthread_mutex_unlock(&mMutex);
6106 return rc;
6107 }
6108 }
6109
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006110 // Configure modules for stream on.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006111 rc = startChannelLocked();
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006112 if (rc != NO_ERROR) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006113 LOGE("startChannelLocked failed %d", rc);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006114 pthread_mutex_unlock(&mMutex);
6115 return rc;
6116 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006117 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006118 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006119 }
6120
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006121 // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
Chenjie Luo4a761802017-06-13 17:35:54 +00006122 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006123 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006124 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006125 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6126 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6127 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006128 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6129 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6130 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006131
6132 if (isSessionHdrPlusModeCompatible()) {
6133 rc = enableHdrPlusModeLocked();
6134 if (rc != OK) {
6135 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6136 pthread_mutex_unlock(&mMutex);
6137 return rc;
6138 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006139 }
6140
6141 mFirstPreviewIntentSeen = true;
6142 }
6143 }
6144
Thierry Strudel3d639192016-09-09 11:52:26 -07006145 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6146
6147 mState = STARTED;
6148 // Added a timed condition wait
6149 struct timespec ts;
6150 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006151 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006152 if (rc < 0) {
6153 isValidTimeout = 0;
6154 LOGE("Error reading the real time clock!!");
6155 }
6156 else {
6157 // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006158 int64_t timeout = 5;
6159 {
6160 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6161 // If there is a pending HDR+ request, the following requests may be blocked until the
6162 // HDR+ request is done. So allow a longer timeout.
6163 if (mHdrPlusPendingRequests.size() > 0) {
6164 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6165 }
6166 }
6167 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006168 }
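// Note: pthread_cond_timedwait() below takes an absolute deadline, so the
// relative timeout in seconds is added to the CLOCK_MONOTONIC timestamp
// captured above (assuming mRequestCond was initialized against the
// monotonic clock).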
6169 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006170 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006171 (mState != ERROR) && (mState != DEINIT)) {
6172 if (!isValidTimeout) {
6173 LOGD("Blocking on conditional wait");
6174 pthread_cond_wait(&mRequestCond, &mMutex);
6175 }
6176 else {
6177 LOGD("Blocking on timed conditional wait");
6178 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6179 if (rc == ETIMEDOUT) {
6180 rc = -ENODEV;
6181 LOGE("Unblocked on timeout!!!!");
6182 break;
6183 }
6184 }
6185 LOGD("Unblocked");
6186 if (mWokenUpByDaemon) {
6187 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006188 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006189 break;
6190 }
6191 }
6192 pthread_mutex_unlock(&mMutex);
6193
6194 return rc;
6195}
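// Rough sketch of how the framework drives the function above (illustrative
// only; the actual camera3_device_ops glue is defined elsewhere in the HAL,
// and the buffer/settings names here are placeholders):
//   camera3_capture_request_t req = {};
//   req.frame_number = nextFrameNumber;     // monotonically increasing
//   req.settings = settings;                // NULL reuses the previous settings
//   req.input_buffer = NULL;                // non-NULL only for reprocess
//   req.num_output_buffers = 1;
//   req.output_buffers = &previewBuffer;    // one camera3_stream_buffer_t per stream
//   device->ops->process_capture_request(device, &req);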
6196
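/*===========================================================================
 * FUNCTION   : startChannelLocked
 *
 * DESCRIPTION: Starts the backend channel (without sensor streaming), starts
 *              Easel MIPI for the selected sensor mode when the Easel manager
 *              client is open, and finally starts sensor streaming. Callers
 *              in this file hold mMutex when invoking it.
 *
 * PARAMETERS : none
 *
 * RETURN     : 0 on success
 *              Error code on failure
 *==========================================================================*/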
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006197int32_t QCamera3HardwareInterface::startChannelLocked()
6198{
6199 // Configure modules for stream on.
6200 int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6201 mChannelHandle, /*start_sensor_streaming*/false);
6202 if (rc != NO_ERROR) {
6203 LOGE("start_channel failed %d", rc);
6204 return rc;
6205 }
6206
6207 {
6208 // Configure Easel for stream on.
6209 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6210
6211 // Now that sensor mode should have been selected, get the selected sensor mode
6212 // info.
6213 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6214 getCurrentSensorModeInfo(mSensorModeInfo);
6215
6216 if (EaselManagerClientOpened) {
6217 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6218 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6219 /*enableCapture*/true);
6220 if (rc != OK) {
6221 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6222 mCameraId, mSensorModeInfo.op_pixel_clk);
6223 return rc;
6224 }
6225 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6226 mEaselMipiStarted = true;
6227 }
6228 }
6229
6230 // Start sensor streaming.
6231 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6232 mChannelHandle);
6233 if (rc != NO_ERROR) {
6234 LOGE("start_sensor_streaming failed %d", rc);
6235 return rc;
6236 }
6237
6238 return 0;
6239}
6240
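/*===========================================================================
 * FUNCTION   : stopChannelLocked
 *
 * DESCRIPTION: Stops the backend channel and, if Easel MIPI was started for
 *              this camera, stops MIPI as well.
 *
 * PARAMETERS :
 *   @stopChannelImmediately: stop the channel immediately. This should be
 *                            used when the device encountered an error and
 *                            MIPI may have been stopped.
 *
 * RETURN     : none
 *==========================================================================*/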
6241void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6242{
6243 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6244 mChannelHandle, stopChannelImmediately);
6245
6246 {
6247 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6248 if (EaselManagerClientOpened && mEaselMipiStarted) {
6249 int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6250 if (rc != 0) {
6251 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6252 }
6253 mEaselMipiStarted = false;
6254 }
6255 }
6256}
6257
Thierry Strudel3d639192016-09-09 11:52:26 -07006258/*===========================================================================
6259 * FUNCTION : dump
6260 *
6261 * DESCRIPTION:
6262 *
6263 * PARAMETERS :
6264 *
6265 *
6266 * RETURN :
6267 *==========================================================================*/
6268void QCamera3HardwareInterface::dump(int fd)
6269{
6270 pthread_mutex_lock(&mMutex);
6271 dprintf(fd, "\n Camera HAL3 information Begin \n");
6272
6273 dprintf(fd, "\nNumber of pending requests: %zu \n",
6274 mPendingRequestsList.size());
6275 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6276 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6277 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6278 for(pendingRequestIterator i = mPendingRequestsList.begin();
6279 i != mPendingRequestsList.end(); i++) {
6280 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6281 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6282 i->input_buffer);
6283 }
6284 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6285 mPendingBuffersMap.get_num_overall_buffers());
6286 dprintf(fd, "-------+------------------\n");
6287 dprintf(fd, " Frame | Stream type mask \n");
6288 dprintf(fd, "-------+------------------\n");
6289 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6290 for(auto &j : req.mPendingBufferList) {
6291 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6292 dprintf(fd, " %5d | %11d \n",
6293 req.frame_number, channel->getStreamTypeMask());
6294 }
6295 }
6296 dprintf(fd, "-------+------------------\n");
6297
6298 dprintf(fd, "\nPending frame drop list: %zu\n",
6299 mPendingFrameDropList.size());
6300 dprintf(fd, "-------+-----------\n");
6301 dprintf(fd, " Frame | Stream ID \n");
6302 dprintf(fd, "-------+-----------\n");
6303 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6304 i != mPendingFrameDropList.end(); i++) {
6305 dprintf(fd, " %5d | %9d \n",
6306 i->frame_number, i->stream_ID);
6307 }
6308 dprintf(fd, "-------+-----------\n");
6309
6310 dprintf(fd, "\n Camera HAL3 information End \n");
6311
6312 /* use dumpsys media.camera as trigger to send update debug level event */
6313 mUpdateDebugLevel = true;
6314 pthread_mutex_unlock(&mMutex);
6315 return;
6316}
6317
6318/*===========================================================================
6319 * FUNCTION : flush
6320 *
6321 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6322 * conditionally restarts channels
6323 *
6324 * PARAMETERS :
6325 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006326 * @ stopChannelImmediately: stop the channel immediately. This should be used
6327 * when device encountered an error and MIPI may has
6328 * been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006329 *
6330 * RETURN :
6331 * 0 on success
6332 * Error code on failure
6333 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006334int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006335{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006336 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006337 int32_t rc = NO_ERROR;
6338
6339 LOGD("Unblocking Process Capture Request");
6340 pthread_mutex_lock(&mMutex);
6341 mFlush = true;
6342 pthread_mutex_unlock(&mMutex);
6343
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07006344 // Disable HDR+ if it's enabled.
6345 {
6346 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6347 finishHdrPlusClientOpeningLocked(l);
6348 disableHdrPlusModeLocked();
6349 }
6350
Thierry Strudel3d639192016-09-09 11:52:26 -07006351 rc = stopAllChannels();
6352 // unlink of dualcam
6353 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006354 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6355 &m_pDualCamCmdPtr->bundle_info;
6356 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006357 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6358 pthread_mutex_lock(&gCamLock);
6359
6360 if (mIsMainCamera == 1) {
6361 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6362 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006363 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006364 // related session id should be session id of linked session
6365 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6366 } else {
6367 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6368 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006369 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006370 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6371 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006372 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006373 pthread_mutex_unlock(&gCamLock);
6374
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006375 rc = mCameraHandle->ops->set_dual_cam_cmd(
6376 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006377 if (rc < 0) {
6378 LOGE("Dualcam: Unlink failed, but still proceed to close");
6379 }
6380 }
6381
6382 if (rc < 0) {
6383 LOGE("stopAllChannels failed");
6384 return rc;
6385 }
6386 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006387 stopChannelLocked(stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006388 }
6389
6390 // Reset bundle info
6391 rc = setBundleInfo();
6392 if (rc < 0) {
6393 LOGE("setBundleInfo failed %d", rc);
6394 return rc;
6395 }
6396
6397 // Mutex Lock
6398 pthread_mutex_lock(&mMutex);
6399
6400 // Unblock process_capture_request
6401 mPendingLiveRequest = 0;
6402 pthread_cond_signal(&mRequestCond);
6403
6404 rc = notifyErrorForPendingRequests();
6405 if (rc < 0) {
6406 LOGE("notifyErrorForPendingRequests failed");
6407 pthread_mutex_unlock(&mMutex);
6408 return rc;
6409 }
6410
6411 mFlush = false;
6412
6413 // Start the Streams/Channels
6414 if (restartChannels) {
6415 rc = startAllChannels();
6416 if (rc < 0) {
6417 LOGE("startAllChannels failed");
6418 pthread_mutex_unlock(&mMutex);
6419 return rc;
6420 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006421 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006422 // Configure modules for stream on.
6423 rc = startChannelLocked();
Thierry Strudel2896d122017-02-23 19:18:03 -08006424 if (rc < 0) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006425 LOGE("startChannelLocked failed");
Thierry Strudel2896d122017-02-23 19:18:03 -08006426 pthread_mutex_unlock(&mMutex);
6427 return rc;
6428 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006429 }
6430 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006431 pthread_mutex_unlock(&mMutex);
6432
6433 return 0;
6434}
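// Illustrative call patterns for flush() (a sketch, not an exhaustive list; the
// restart case is an assumption, while the no-restart case mirrors
// handleCameraDeviceError() further below):
//
//     flush(true /*restartChannels*/, false /*stopChannelImmediately*/);  // e.g. a flush that brings channels back up
//     flush(false /*restartChannels*/, true /*stopChannelImmediately*/);  // e.g. fatal device error with MIPI possibly down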
6435
6436/*===========================================================================
6437 * FUNCTION : flushPerf
6438 *
6439 * DESCRIPTION: This is the performance-optimized version of flush that does
6440 * not use stream off; instead, it flushes the system
6441 *
6442 * PARAMETERS :
6443 *
6444 *
6445 * RETURN : 0 : success
6446 * -EINVAL: input is malformed (device is not valid)
6447 * -ENODEV: if the device has encountered a serious error
6448 *==========================================================================*/
6449int QCamera3HardwareInterface::flushPerf()
6450{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006451 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006452 int32_t rc = 0;
6453 struct timespec timeout;
6454 bool timed_wait = false;
6455
6456 pthread_mutex_lock(&mMutex);
6457 mFlushPerf = true;
6458 mPendingBuffersMap.numPendingBufsAtFlush =
6459 mPendingBuffersMap.get_num_overall_buffers();
6460 LOGD("Calling flush. Wait for %d buffers to return",
6461 mPendingBuffersMap.numPendingBufsAtFlush);
6462
6463 /* send the flush event to the backend */
6464 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6465 if (rc < 0) {
6466 LOGE("Error in flush: IOCTL failure");
6467 mFlushPerf = false;
6468 pthread_mutex_unlock(&mMutex);
6469 return -ENODEV;
6470 }
6471
6472 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6473 LOGD("No pending buffers in HAL, return flush");
6474 mFlushPerf = false;
6475 pthread_mutex_unlock(&mMutex);
6476 return rc;
6477 }
6478
6479 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006480 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006481 if (rc < 0) {
6482 LOGE("Error reading the real time clock, cannot use timed wait");
6483 } else {
6484 timeout.tv_sec += FLUSH_TIMEOUT;
6485 timed_wait = true;
6486 }
6487
6488 //Block on conditional variable
6489 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6490 LOGD("Waiting on mBuffersCond");
6491 if (!timed_wait) {
6492 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6493 if (rc != 0) {
6494 LOGE("pthread_cond_wait failed due to rc = %s",
6495 strerror(rc));
6496 break;
6497 }
6498 } else {
6499 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6500 if (rc != 0) {
6501 LOGE("pthread_cond_timedwait failed due to rc = %s",
6502 strerror(rc));
6503 break;
6504 }
6505 }
6506 }
6507 if (rc != 0) {
6508 mFlushPerf = false;
6509 pthread_mutex_unlock(&mMutex);
6510 return -ENODEV;
6511 }
6512
6513 LOGD("Received buffers, now safe to return them");
6514
6515 //make sure the channels handle flush
6516 //currently only required for the picture channel to release snapshot resources
6517 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6518 it != mStreamInfo.end(); it++) {
6519 QCamera3Channel *channel = (*it)->channel;
6520 if (channel) {
6521 rc = channel->flush();
6522 if (rc) {
6523 LOGE("Flushing the channels failed with error %d", rc);
6524 // Even though the channel flush failed, we need to continue and
6525 // return the buffers we have to the framework; however, the return
6526 // value will be an error.
6527 rc = -ENODEV;
6528 }
6529 }
6530 }
6531
6532 /* notify the frameworks and send errored results */
6533 rc = notifyErrorForPendingRequests();
6534 if (rc < 0) {
6535 LOGE("notifyErrorForPendingRequests failed");
6536 pthread_mutex_unlock(&mMutex);
6537 return rc;
6538 }
6539
6540 //unblock process_capture_request
6541 mPendingLiveRequest = 0;
6542 unblockRequestIfNecessary();
6543
6544 mFlushPerf = false;
6545 pthread_mutex_unlock(&mMutex);
6546 LOGD ("Flush Operation complete. rc = %d", rc);
6547 return rc;
6548}
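/* Note on the timed wait above: pairing clock_gettime(CLOCK_MONOTONIC) with
 * pthread_cond_timedwait() only behaves as intended when the condition
 * variable was created with a monotonic clock attribute (assumed to be done
 * for mBuffersCond elsewhere in this HAL). A minimal sketch of such an
 * initialization, shown purely as an illustration:
 *
 *     pthread_condattr_t attr;
 *     pthread_condattr_init(&attr);
 *     pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
 *     pthread_cond_init(&mBuffersCond, &attr);
 *     pthread_condattr_destroy(&attr);
 */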
6549
6550/*===========================================================================
6551 * FUNCTION : handleCameraDeviceError
6552 *
6553 * DESCRIPTION: This function performs an internal flush, notifies the error to
6554 * the framework, and updates the state variable.
6555 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006556 * PARAMETERS :
6557 * @stopChannelImmediately : stop channels immediately, without waiting for
6558 * the frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006559 *
6560 * RETURN : NO_ERROR on Success
6561 * Error code on failure
6562 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006563int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006564{
6565 int32_t rc = NO_ERROR;
6566
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006567 {
6568 Mutex::Autolock lock(mFlushLock);
6569 pthread_mutex_lock(&mMutex);
6570 if (mState != ERROR) {
6571 //if mState != ERROR, nothing to be done
6572 pthread_mutex_unlock(&mMutex);
6573 return NO_ERROR;
6574 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006575 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006576
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006577 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006578 if (NO_ERROR != rc) {
6579 LOGE("internal flush to handle mState = ERROR failed");
6580 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006581
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006582 pthread_mutex_lock(&mMutex);
6583 mState = DEINIT;
6584 pthread_mutex_unlock(&mMutex);
6585 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006586
6587 camera3_notify_msg_t notify_msg;
6588 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6589 notify_msg.type = CAMERA3_MSG_ERROR;
6590 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6591 notify_msg.message.error.error_stream = NULL;
6592 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006593 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006594
6595 return rc;
6596}
6597
6598/*===========================================================================
6599 * FUNCTION : captureResultCb
6600 *
6601 * DESCRIPTION: Callback handler for all capture results
6602 * (streams, as well as metadata)
6603 *
6604 * PARAMETERS :
6605 * @metadata : metadata information
6606 * @buffer : actual gralloc buffer to be returned to the framework.
6607 * NULL if metadata.
6608 *
6609 * RETURN : NONE
6610 *==========================================================================*/
6611void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6612 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6613{
6614 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006615 pthread_mutex_lock(&mMutex);
6616 uint8_t batchSize = mBatchSize;
6617 pthread_mutex_unlock(&mMutex);
6618 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006619 handleBatchMetadata(metadata_buf,
6620 true /* free_and_bufdone_meta_buf */);
6621 } else { /* mBatchSize = 0 */
6622 hdrPlusPerfLock(metadata_buf);
6623 pthread_mutex_lock(&mMutex);
6624 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006625 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006626 true /* last urgent frame of batch metadata */,
6627 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006628 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006629 pthread_mutex_unlock(&mMutex);
6630 }
6631 } else if (isInputBuffer) {
6632 pthread_mutex_lock(&mMutex);
6633 handleInputBufferWithLock(frame_number);
6634 pthread_mutex_unlock(&mMutex);
6635 } else {
6636 pthread_mutex_lock(&mMutex);
6637 handleBufferWithLock(buffer, frame_number);
6638 pthread_mutex_unlock(&mMutex);
6639 }
6640 return;
6641}
6642
6643/*===========================================================================
6644 * FUNCTION : getReprocessibleOutputStreamId
6645 *
6646 * DESCRIPTION: Get the source output stream id for the input reprocess stream,
6647 * based on size and format; this would be the largest
6648 * output stream if an input stream exists.
6649 *
6650 * PARAMETERS :
6651 * @id : return the stream id if found
6652 *
6653 * RETURN : int32_t type of status
6654 * NO_ERROR -- success
6655 * non-zero failure code
6656 *==========================================================================*/
6657int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6658{
6659 /* Check if there is any output or bidirectional stream with the same size
6660 and format, and return that stream */
6661 if ((mInputStreamInfo.dim.width > 0) &&
6662 (mInputStreamInfo.dim.height > 0)) {
6663 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6664 it != mStreamInfo.end(); it++) {
6665
6666 camera3_stream_t *stream = (*it)->stream;
6667 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6668 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6669 (stream->format == mInputStreamInfo.format)) {
6670 // Usage flag for an input stream and the source output stream
6671 // may be different.
6672 LOGD("Found reprocessible output stream! %p", *it);
6673 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6674 stream->usage, mInputStreamInfo.usage);
6675
6676 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6677 if (channel != NULL && channel->mStreams[0]) {
6678 id = channel->mStreams[0]->getMyServerID();
6679 return NO_ERROR;
6680 }
6681 }
6682 }
6683 } else {
6684 LOGD("No input stream, so no reprocessible output stream");
6685 }
6686 return NAME_NOT_FOUND;
6687}
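/* Illustrative usage (a hypothetical sketch): callers pass the id by
 * reference and treat NAME_NOT_FOUND as "no reprocessible output stream".
 *
 *     uint32_t reprocStreamId = 0;
 *     if (NO_ERROR == getReprocessibleOutputStreamId(reprocStreamId)) {
 *         // reprocStreamId now holds the server id of the source output
 *         // stream matching the input/reprocess stream's size and format.
 *     }
 */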
6688
6689/*===========================================================================
6690 * FUNCTION : lookupFwkName
6691 *
6692 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6693 * make sure the parameter is correctly propagated
6694 *
6695 * PARAMETERS :
6696 * @arr : map between the two enums
6697 * @len : len of the map
6698 * @hal_name : name of the hal_parm to map
6699 *
6700 * RETURN : int type of status
6701 * fwk_name -- success
6702 * non-zero failure code
6703 *==========================================================================*/
6704template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6705 size_t len, halType hal_name)
6706{
6707
6708 for (size_t i = 0; i < len; i++) {
6709 if (arr[i].hal_name == hal_name) {
6710 return arr[i].fwk_name;
6711 }
6712 }
6713
6714 /* Not being able to find a matching framework type is not necessarily
6715 * an error case. This happens when mm-camera supports more attributes
6716 * than the framework does */
6717 LOGH("Cannot find matching framework type");
6718 return NAME_NOT_FOUND;
6719}
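/* Illustrative usage of lookupFwkName, mirroring the scene-mode translation in
 * translateFromHalMetadata() further below: map a backend enum to its
 * framework counterpart and skip the metadata update when no match exists.
 *
 *     int val = lookupFwkName(SCENE_MODES_MAP,
 *             METADATA_MAP_SIZE(SCENE_MODES_MAP), *sceneMode);
 *     if (NAME_NOT_FOUND != val) {
 *         uint8_t fwkSceneMode = (uint8_t)val;
 *         camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
 *     }
 */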
6720
6721/*===========================================================================
6722 * FUNCTION : lookupHalName
6723 *
6724 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6725 * make sure the parameter is correctly propagated
6726 *
6727 * PARAMETERS :
6728 * @arr : map between the two enums
6729 * @len : len of the map
6730 * @fwk_name : name of the framework parameter to map
6731 *
6732 * RETURN : int32_t type of status
6733 * hal_name -- success
6734 * non-zero failure code
6735 *==========================================================================*/
6736template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6737 size_t len, fwkType fwk_name)
6738{
6739 for (size_t i = 0; i < len; i++) {
6740 if (arr[i].fwk_name == fwk_name) {
6741 return arr[i].hal_name;
6742 }
6743 }
6744
6745 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6746 return NAME_NOT_FOUND;
6747}
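/* Illustrative usage of lookupHalName (a hypothetical sketch): the inverse
 * direction, resolving a framework enum received from a capture request into
 * the backend value, with NAME_NOT_FOUND flagging an unsupported setting.
 *
 *     int halFlashMode = lookupHalName(FLASH_MODES_MAP,
 *             METADATA_MAP_SIZE(FLASH_MODES_MAP), fwk_flashMode);
 *     if (NAME_NOT_FOUND == halFlashMode) {
 *         // unsupported framework value; reject or ignore the setting
 *     }
 */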
6748
6749/*===========================================================================
6750 * FUNCTION : lookupProp
6751 *
6752 * DESCRIPTION: lookup a value by its name
6753 *
6754 * PARAMETERS :
6755 * @arr : map between the two enums
6756 * @len : size of the map
6757 * @name : name to be looked up
6758 *
6759 * RETURN : Value if found
6760 * CAM_CDS_MODE_MAX if not found
6761 *==========================================================================*/
6762template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6763 size_t len, const char *name)
6764{
6765 if (name) {
6766 for (size_t i = 0; i < len; i++) {
6767 if (!strcmp(arr[i].desc, name)) {
6768 return arr[i].val;
6769 }
6770 }
6771 }
6772 return CAM_CDS_MODE_MAX;
6773}
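/* Illustrative usage of lookupProp (a hypothetical sketch; the property name
 * and CDS map identifier are assumptions made for the example): resolve a CDS
 * mode requested through an Android system property into the backend enum,
 * with CAM_CDS_MODE_MAX signalling an unrecognized string.
 *
 *     char prop[PROPERTY_VALUE_MAX];
 *     memset(prop, 0, sizeof(prop));
 *     property_get("persist.camera.CDS", prop, "auto");
 *     cam_cds_mode_type_t cds = lookupProp(CDS_MAP,
 *             METADATA_MAP_SIZE(CDS_MAP), prop);
 *     if (CAM_CDS_MODE_MAX == cds) {
 *         // unknown value; fall back to a sensible default
 *     }
 */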
6774
6775/*===========================================================================
6776 * FUNCTION   : translateFromHalMetadata
6777 * DESCRIPTION: Translate metadata from the HAL backend into the camera_metadata_t format returned to the framework
6778 *
6779 * PARAMETERS :
6780 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006781 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006782 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006783 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6784 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006785 *
6786 * RETURN : camera_metadata_t*
6787 * metadata in a format specified by fwk
6788 *==========================================================================*/
6789camera_metadata_t*
6790QCamera3HardwareInterface::translateFromHalMetadata(
6791 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006792 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006793 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006794 bool lastMetadataInBatch,
6795 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006796{
6797 CameraMetadata camMetadata;
6798 camera_metadata_t *resultMetadata;
6799
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006800 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006801 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6802 * The timestamp is needed because it's used for the shutter notification calculation.
6803 */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006804 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006805 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006806 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006807 }
6808
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006809 if (pendingRequest.jpegMetadata.entryCount())
6810 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006811
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006812 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6813 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6814 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6815 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6816 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006817 if (mBatchSize == 0) {
6818 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006819 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006820 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006821
Samuel Ha68ba5172016-12-15 18:41:12 -08006822 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6823 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006824 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006825 // DevCamDebug metadata translateFromHalMetadata AF
6826 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6827 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6828 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6829 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006832 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006833 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6834 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6835 }
6836 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006837 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006838 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6839 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6840 }
6841 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6842 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6843 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6844 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6845 }
6846 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6847 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6848 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6849 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6850 }
6851 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6852 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6853 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6854 *DevCamDebug_af_monitor_pdaf_target_pos;
6855 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6856 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6857 }
6858 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6859 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6860 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6861 *DevCamDebug_af_monitor_pdaf_confidence;
6862 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6863 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6864 }
6865 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6866 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6867 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6868 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6869 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6870 }
6871 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6872 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6873 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6874 *DevCamDebug_af_monitor_tof_target_pos;
6875 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6876 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6877 }
6878 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6879 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6880 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6881 *DevCamDebug_af_monitor_tof_confidence;
6882 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6883 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6884 }
6885 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6886 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6887 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6888 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6889 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6890 }
6891 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6892 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6893 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6894 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6895 &fwk_DevCamDebug_af_monitor_type_select, 1);
6896 }
6897 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6898 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6899 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6900 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6901 &fwk_DevCamDebug_af_monitor_refocus, 1);
6902 }
6903 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6904 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6905 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6906 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6907 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6908 }
6909 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6910 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6911 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6912 *DevCamDebug_af_search_pdaf_target_pos;
6913 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6914 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6915 }
6916 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6917 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6918 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6919 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6920 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6921 }
6922 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6923 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6924 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6925 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6926 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6927 }
6928 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6929 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6930 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6931 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6932 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6933 }
6934 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6935 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6936 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6937 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6938 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6939 }
6940 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6941 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6942 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6943 *DevCamDebug_af_search_tof_target_pos;
6944 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6945 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6946 }
6947 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6948 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6949 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6950 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6951 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6952 }
6953 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6954 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6955 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6956 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6957 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6958 }
6959 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6960 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6961 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6962 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6963 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6964 }
6965 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6966 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6967 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6968 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6969 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6970 }
6971 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6972 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6973 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6974 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6975 &fwk_DevCamDebug_af_search_type_select, 1);
6976 }
6977 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6978 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6979 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6980 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6981 &fwk_DevCamDebug_af_search_next_pos, 1);
6982 }
6983 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6984 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6985 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6986 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6987 &fwk_DevCamDebug_af_search_target_pos, 1);
6988 }
6989 // DevCamDebug metadata translateFromHalMetadata AEC
6990 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6991 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6992 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6993 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6994 }
6995 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6996 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6997 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6998 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6999 }
7000 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
7001 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
7002 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
7003 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
7004 }
7005 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
7006 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
7007 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
7008 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
7009 }
7010 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
7011 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
7012 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
7013 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
7014 }
7015 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
7016 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
7017 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
7018 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
7019 }
7020 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
7021 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
7022 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
7023 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
7024 }
7025 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
7026 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
7027 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
7028 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
7029 }
Samuel Ha34229982017-02-17 13:51:11 -08007030 // DevCamDebug metadata translateFromHalMetadata zzHDR
7031 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
7032 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
7033 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7034 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7035 }
7036 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7037 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007038 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007039 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7040 }
7041 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7042 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7043 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7044 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7045 }
7046 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7047 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007048 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007049 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7050 }
7051 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7052 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7053 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7054 *DevCamDebug_aec_hdr_sensitivity_ratio;
7055 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7056 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7057 }
7058 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7059 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7060 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7061 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7062 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7063 }
7064 // DevCamDebug metadata translateFromHalMetadata ADRC
7065 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7066 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7067 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7068 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7069 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7070 }
7071 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7072 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7073 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7074 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7075 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7076 }
7077 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7078 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7079 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7080 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7081 }
7082 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7083 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7084 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7085 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7086 }
7087 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7088 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7089 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7090 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7091 }
7092 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7093 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7094 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7095 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7096 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007097 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7098 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7099 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7100 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7101 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7102 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7103 }
7104 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7105 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7106 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7107 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7108 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7109 }
7110 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7111 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7112 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7113 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7114 &fwk_DevCamDebug_aec_subject_motion, 1);
7115 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007116 // DevCamDebug metadata translateFromHalMetadata AWB
7117 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7118 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7119 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7120 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7121 }
7122 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7123 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7124 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7125 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7126 }
7127 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7128 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7129 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7130 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7131 }
7132 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7133 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7134 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7135 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7136 }
7137 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7138 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7139 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7140 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7141 }
7142 }
7143 // atrace_end(ATRACE_TAG_ALWAYS);
7144
Thierry Strudel3d639192016-09-09 11:52:26 -07007145 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7146 int64_t fwk_frame_number = *frame_number;
7147 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7148 }
7149
7150 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7151 int32_t fps_range[2];
7152 fps_range[0] = (int32_t)float_range->min_fps;
7153 fps_range[1] = (int32_t)float_range->max_fps;
7154 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7155 fps_range, 2);
7156 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7157 fps_range[0], fps_range[1]);
7158 }
7159
7160 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7161 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7162 }
7163
7164 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7165 int val = lookupFwkName(SCENE_MODES_MAP,
7166 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7167 *sceneMode);
7168 if (NAME_NOT_FOUND != val) {
7169 uint8_t fwkSceneMode = (uint8_t)val;
7170 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7171 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7172 fwkSceneMode);
7173 }
7174 }
7175
7176 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7177 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7178 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7179 }
7180
7181 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7182 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7183 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7184 }
7185
7186 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7187 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7188 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7189 }
7190
7191 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7192 CAM_INTF_META_EDGE_MODE, metadata) {
7193 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7194 }
7195
7196 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7197 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7198 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7199 }
7200
7201 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7202 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7203 }
7204
7205 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7206 if (0 <= *flashState) {
7207 uint8_t fwk_flashState = (uint8_t) *flashState;
7208 if (!gCamCapability[mCameraId]->flash_available) {
7209 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7210 }
7211 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7212 }
7213 }
7214
7215 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7216 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7217 if (NAME_NOT_FOUND != val) {
7218 uint8_t fwk_flashMode = (uint8_t)val;
7219 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7220 }
7221 }
7222
7223 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7224 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7225 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7226 }
7227
7228 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7229 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7230 }
7231
7232 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7233 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7234 }
7235
7236 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7237 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7238 }
7239
7240 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7241 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7242 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7243 }
7244
7245 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7246 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7247 LOGD("fwk_videoStab = %d", fwk_videoStab);
7248 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7249 } else {
7250 // Regardless of whether video stabilization is supported or not, CTS expects the EIS
7251 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7252 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7253 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007254 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007255 }
7256
7257 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7258 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7259 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7260 }
7261
7262 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7263 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7264 }
7265
Thierry Strudel3d639192016-09-09 11:52:26 -07007266 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7267 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007268 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007269
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007270 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7271 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007272
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007273 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007274 blackLevelAppliedPattern->cam_black_level[0],
7275 blackLevelAppliedPattern->cam_black_level[1],
7276 blackLevelAppliedPattern->cam_black_level[2],
7277 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007278 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7279 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007280
7281#ifndef USE_HAL_3_3
7282 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307283 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw depth
Zhijun Heb753c672016-06-15 14:50:48 -07007284 // space, i.e. divide by 2^(14-10) = 16.
Jason Lee4f3d96e2017-02-28 19:24:14 +05307285 fwk_blackLevelInd[0] /= 16.0;
7286 fwk_blackLevelInd[1] /= 16.0;
7287 fwk_blackLevelInd[2] /= 16.0;
7288 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007289 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7290 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007291#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007292 }
7293
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007294#ifndef USE_HAL_3_3
7295 // Fixed whitelevel is used by ISP/Sensor
7296 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7297 &gCamCapability[mCameraId]->white_level, 1);
7298#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007299
7300 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7301 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7302 int32_t scalerCropRegion[4];
7303 scalerCropRegion[0] = hScalerCropRegion->left;
7304 scalerCropRegion[1] = hScalerCropRegion->top;
7305 scalerCropRegion[2] = hScalerCropRegion->width;
7306 scalerCropRegion[3] = hScalerCropRegion->height;
7307
7308 // Adjust crop region from sensor output coordinate system to active
7309 // array coordinate system.
7310 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7311 scalerCropRegion[2], scalerCropRegion[3]);
7312
7313 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7314 }
7315
7316 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7317 LOGD("sensorExpTime = %lld", *sensorExpTime);
7318 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7319 }
7320
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007321 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7322 LOGD("expTimeBoost = %f", *expTimeBoost);
7323 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7324 }
7325
Thierry Strudel3d639192016-09-09 11:52:26 -07007326 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7327 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7328 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7329 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7330 }
7331
7332 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7333 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7334 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7335 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7336 sensorRollingShutterSkew, 1);
7337 }
7338
7339 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7340 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7341 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7342
7343 //calculate the noise profile based on sensitivity
7344 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7345 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7346 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7347 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7348 noise_profile[i] = noise_profile_S;
7349 noise_profile[i+1] = noise_profile_O;
7350 }
7351 LOGD("noise model entry (S, O) is (%f, %f)",
7352 noise_profile_S, noise_profile_O);
7353 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7354 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7355 }
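    /* Layout note (illustrative): the profile above interleaves (S, O) per
     * color channel, so with num_color_channels == 4 the array reads
     * [S, O, S, O, S, O, S, O] and ANDROID_SENSOR_NOISE_PROFILE receives
     * 8 doubles. */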
7356
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007357#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007358 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007359 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007360 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007361 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007362 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7363 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7364 }
7365 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007366#endif
7367
Thierry Strudel3d639192016-09-09 11:52:26 -07007368 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7369 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7370 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7371 }
7372
7373 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7374 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7375 *faceDetectMode);
7376 if (NAME_NOT_FOUND != val) {
7377 uint8_t fwk_faceDetectMode = (uint8_t)val;
7378 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7379
7380 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7381 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7382 CAM_INTF_META_FACE_DETECTION, metadata) {
7383 uint8_t numFaces = MIN(
7384 faceDetectionInfo->num_faces_detected, MAX_ROI);
7385 int32_t faceIds[MAX_ROI];
7386 uint8_t faceScores[MAX_ROI];
7387 int32_t faceRectangles[MAX_ROI * 4];
7388 int32_t faceLandmarks[MAX_ROI * 6];
7389 size_t j = 0, k = 0;
7390
7391 for (size_t i = 0; i < numFaces; i++) {
7392 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7393 // Adjust crop region from sensor output coordinate system to active
7394 // array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007395 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007396 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7397 rect.width, rect.height);
7398
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007399 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007400
Jason Lee8ce36fa2017-04-19 19:40:37 -07007401 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7402 "bottom-right (%d, %d)",
7403 faceDetectionInfo->frame_id, i,
7404 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7405 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7406
Thierry Strudel3d639192016-09-09 11:52:26 -07007407 j+= 4;
7408 }
7409 if (numFaces <= 0) {
7410 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7411 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7412 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7413 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7414 }
7415
7416 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7417 numFaces);
7418 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7419 faceRectangles, numFaces * 4U);
7420 if (fwk_faceDetectMode ==
7421 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7422 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7423 CAM_INTF_META_FACE_LANDMARK, metadata) {
7424
7425 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007426 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007427 // Map the co-ordinate sensor output coordinate system to active
7428 // array coordinate system.
7429 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007430 face_landmarks.left_eye_center.x,
7431 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007432 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007433 face_landmarks.right_eye_center.x,
7434 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007435 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007436 face_landmarks.mouth_center.x,
7437 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007438
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007439 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007440
7441 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7442 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7443 faceDetectionInfo->frame_id, i,
7444 faceLandmarks[k + LEFT_EYE_X],
7445 faceLandmarks[k + LEFT_EYE_Y],
7446 faceLandmarks[k + RIGHT_EYE_X],
7447 faceLandmarks[k + RIGHT_EYE_Y],
7448 faceLandmarks[k + MOUTH_X],
7449 faceLandmarks[k + MOUTH_Y]);
7450
Thierry Strudel04e026f2016-10-10 11:27:36 -07007451 k+= TOTAL_LANDMARK_INDICES;
7452 }
7453 } else {
7454 for (size_t i = 0; i < numFaces; i++) {
7455 setInvalidLandmarks(faceLandmarks+k);
7456 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007457 }
7458 }
7459
Jason Lee49619db2017-04-13 12:07:22 -07007460 for (size_t i = 0; i < numFaces; i++) {
7461 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7462
7463 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7464 faceDetectionInfo->frame_id, i, faceIds[i]);
7465 }
7466
Thierry Strudel3d639192016-09-09 11:52:26 -07007467 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7468 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7469 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007470 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007471 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7472 CAM_INTF_META_FACE_BLINK, metadata) {
7473 uint8_t detected[MAX_ROI];
7474 uint8_t degree[MAX_ROI * 2];
7475 for (size_t i = 0; i < numFaces; i++) {
7476 detected[i] = blinks->blink[i].blink_detected;
7477 degree[2 * i] = blinks->blink[i].left_blink;
7478 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007479
Jason Lee49619db2017-04-13 12:07:22 -07007480 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7481 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7482 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7483 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007484 }
7485 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7486 detected, numFaces);
7487 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7488 degree, numFaces * 2);
7489 }
7490 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7491 CAM_INTF_META_FACE_SMILE, metadata) {
7492 uint8_t degree[MAX_ROI];
7493 uint8_t confidence[MAX_ROI];
7494 for (size_t i = 0; i < numFaces; i++) {
7495 degree[i] = smiles->smile[i].smile_degree;
7496 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007497
Jason Lee49619db2017-04-13 12:07:22 -07007498 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7499 "smile_degree=%d, smile_score=%d",
7500 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007501 }
7502 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7503 degree, numFaces);
7504 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7505 confidence, numFaces);
7506 }
7507 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7508 CAM_INTF_META_FACE_GAZE, metadata) {
7509 int8_t angle[MAX_ROI];
7510 int32_t direction[MAX_ROI * 3];
7511 int8_t degree[MAX_ROI * 2];
7512 for (size_t i = 0; i < numFaces; i++) {
7513 angle[i] = gazes->gaze[i].gaze_angle;
7514 direction[3 * i] = gazes->gaze[i].updown_dir;
7515 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7516 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7517 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7518 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007519
7520 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7521 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7522 "left_right_gaze=%d, top_bottom_gaze=%d",
7523 faceDetectionInfo->frame_id, i, angle[i],
7524 direction[3 * i], direction[3 * i + 1],
7525 direction[3 * i + 2],
7526 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007527 }
7528 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7529 (uint8_t *)angle, numFaces);
7530 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7531 direction, numFaces * 3);
7532 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7533 (uint8_t *)degree, numFaces * 2);
7534 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007535 }
7536 }
7537 }
7538 }
7539
7540 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7541 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007542 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007543 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007544 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007545
Shuzhen Wang14415f52016-11-16 18:26:18 -08007546 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7547 histogramBins = *histBins;
7548 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7549 }
7550
7551 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007552 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7553 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007554 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007555
7556 switch (stats_data->type) {
7557 case CAM_HISTOGRAM_TYPE_BAYER:
7558 switch (stats_data->bayer_stats.data_type) {
7559 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007560 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7561 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007562 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007563 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7564 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007565 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007566 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7567 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007568 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007569 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007570 case CAM_STATS_CHANNEL_R:
7571 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007572 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7573 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007574 }
7575 break;
7576 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007577 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007578 break;
7579 }
7580
Shuzhen Wang14415f52016-11-16 18:26:18 -08007581 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007582 }
7583 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007584 }
7585
7586 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7587 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7588 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7589 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7590 }
7591
7592 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7593 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7594 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7595 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7596 }
7597
7598 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7599 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7600 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7601 CAM_MAX_SHADING_MAP_HEIGHT);
7602 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7603 CAM_MAX_SHADING_MAP_WIDTH);
7604 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7605 lensShadingMap->lens_shading, 4U * map_width * map_height);
7606 }
7607
7608 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7609 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7610 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7611 }
7612
7613 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7614 // Populate CAM_INTF_META_TONEMAP_CURVES
7615 /* ch0 = G, ch1 = B, ch2 = R */
7616 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7617 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7618 tonemap->tonemap_points_cnt,
7619 CAM_MAX_TONEMAP_CURVE_SIZE);
7620 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7621 }
7622
7623 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7624 &tonemap->curves[0].tonemap_points[0][0],
7625 tonemap->tonemap_points_cnt * 2);
7626
7627 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7628 &tonemap->curves[1].tonemap_points[0][0],
7629 tonemap->tonemap_points_cnt * 2);
7630
7631 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7632 &tonemap->curves[2].tonemap_points[0][0],
7633 tonemap->tonemap_points_cnt * 2);
7634 }
7635
7636 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7637 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7638 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7639 CC_GAIN_MAX);
7640 }
7641
7642 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7643 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7644 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7645 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7646 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7647 }
7648
7649 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7650 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7651 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7652 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7653 toneCurve->tonemap_points_cnt,
7654 CAM_MAX_TONEMAP_CURVE_SIZE);
7655 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7656 }
7657 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7658 (float*)toneCurve->curve.tonemap_points,
7659 toneCurve->tonemap_points_cnt * 2);
7660 }
7661
7662 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7663 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7664 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7665 predColorCorrectionGains->gains, 4);
7666 }
7667
7668 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7669 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7670 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7671 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7672 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7673 }
7674
7675 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7676 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7677 }
7678
7679 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7680 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7681 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7682 }
7683
7684 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7685 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7686 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7687 }
7688
7689 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7690 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7691 *effectMode);
7692 if (NAME_NOT_FOUND != val) {
7693 uint8_t fwk_effectMode = (uint8_t)val;
7694 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7695 }
7696 }
7697
7698 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7699 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7700 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7701 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7702 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7703 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7704 }
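// R and B always land at indices 0 and 3 of the framework array; the two
// green channels swap positions depending on the sensor's color filter
// arrangement.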
7705 int32_t fwk_testPatternData[4];
7706 fwk_testPatternData[0] = testPatternData->r;
7707 fwk_testPatternData[3] = testPatternData->b;
7708 switch (gCamCapability[mCameraId]->color_arrangement) {
7709 case CAM_FILTER_ARRANGEMENT_RGGB:
7710 case CAM_FILTER_ARRANGEMENT_GRBG:
7711 fwk_testPatternData[1] = testPatternData->gr;
7712 fwk_testPatternData[2] = testPatternData->gb;
7713 break;
7714 case CAM_FILTER_ARRANGEMENT_GBRG:
7715 case CAM_FILTER_ARRANGEMENT_BGGR:
7716 fwk_testPatternData[2] = testPatternData->gr;
7717 fwk_testPatternData[1] = testPatternData->gb;
7718 break;
7719 default:
7720 LOGE("color arrangement %d is not supported",
7721 gCamCapability[mCameraId]->color_arrangement);
7722 break;
7723 }
7724 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7725 }
7726
7727 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7728 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7729 }
7730
7731 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7732 String8 str((const char *)gps_methods);
7733 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7734 }
7735
7736 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7737 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7738 }
7739
7740 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7741 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7742 }
7743
7744 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7745 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7746 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7747 }
7748
7749 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7750 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7751 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7752 }
7753
7754 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7755 int32_t fwk_thumb_size[2];
7756 fwk_thumb_size[0] = thumb_size->width;
7757 fwk_thumb_size[1] = thumb_size->height;
7758 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7759 }
7760
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007761 // Skip reprocess metadata if there is no input stream.
7762 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7763 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7764 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7765 privateData,
7766 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7767 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007768 }
7769
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007770 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007771 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007772 meteringMode, 1);
7773 }
7774
Thierry Strudel54dc9782017-02-15 12:12:10 -08007775 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7776 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7777 LOGD("hdr_scene_data: %d %f\n",
7778 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7779 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7780 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7781 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7782 &isHdr, 1);
7783 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7784 &isHdrConfidence, 1);
7785 }
7786
7787
7788
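// Pack the tuning metadata into a flat blob: a header of uint32 fields (data
// version plus the per-module payload sizes) followed by the sensor, VFE, CPP
// and CAC payloads, published through QCAMERA3_TUNING_META_DATA_BLOB.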
Thierry Strudel3d639192016-09-09 11:52:26 -07007789 if (metadata->is_tuning_params_valid) {
7790 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7791 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7792 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7793
7794
7795 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7796 sizeof(uint32_t));
7797 data += sizeof(uint32_t);
7798
7799 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7800 sizeof(uint32_t));
7801 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7802 data += sizeof(uint32_t);
7803
7804 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7805 sizeof(uint32_t));
7806 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7807 data += sizeof(uint32_t);
7808
7809 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7810 sizeof(uint32_t));
7811 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7812 data += sizeof(uint32_t);
7813
7814 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7815 sizeof(uint32_t));
7816 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7817 data += sizeof(uint32_t);
7818
7819 metadata->tuning_params.tuning_mod3_data_size = 0;
7820 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7821 sizeof(uint32_t));
7822 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7823 data += sizeof(uint32_t);
7824
7825 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7826 TUNING_SENSOR_DATA_MAX);
7827 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7828 count);
7829 data += count;
7830
7831 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7832 TUNING_VFE_DATA_MAX);
7833 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7834 count);
7835 data += count;
7836
7837 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7838 TUNING_CPP_DATA_MAX);
7839 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7840 count);
7841 data += count;
7842
7843 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7844 TUNING_CAC_DATA_MAX);
7845 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7846 count);
7847 data += count;
7848
7849 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7850 (int32_t *)(void *)tuning_meta_data_blob,
7851 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7852 }
7853
7854 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7855 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7856 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7857 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7858 NEUTRAL_COL_POINTS);
7859 }
7860
7861 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7862 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7863 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7864 }
7865
7866 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7867 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7868 // Adjust the AE region from the sensor output coordinate system to the
7869 // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007870 cam_rect_t hAeRect = hAeRegions->rect;
7871 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7872 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007873
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007874 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007875 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7876 REGIONS_TUPLE_COUNT);
7877 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7878 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007879 hAeRect.left, hAeRect.top, hAeRect.width,
7880 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 }
7882
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007883 if (!pendingRequest.focusStateSent) {
7884 if (pendingRequest.focusStateValid) {
7885 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7886 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007887 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007888 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7889 uint8_t fwk_afState = (uint8_t) *afState;
7890 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7891 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7892 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007893 }
7894 }
7895
Thierry Strudel3d639192016-09-09 11:52:26 -07007896 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7897 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7898 }
7899
7900 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7901 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7902 }
7903
7904 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7905 uint8_t fwk_lensState = *lensState;
7906 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7907 }
7908
Thierry Strudel3d639192016-09-09 11:52:26 -07007909 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
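// Fold the HAL's 50Hz/60Hz auto variants into plain AUTO before mapping to
// the framework antibanding enum.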
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007910 uint32_t ab_mode = *hal_ab_mode;
7911 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7912 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7913 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7914 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007915 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007916 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007917 if (NAME_NOT_FOUND != val) {
7918 uint8_t fwk_ab_mode = (uint8_t)val;
7919 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7920 }
7921 }
7922
7923 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7924 int val = lookupFwkName(SCENE_MODES_MAP,
7925 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7926 if (NAME_NOT_FOUND != val) {
7927 uint8_t fwkBestshotMode = (uint8_t)val;
7928 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7929 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7930 } else {
7931 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7932 }
7933 }
7934
7935 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7936 uint8_t fwk_mode = (uint8_t) *mode;
7937 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7938 }
7939
7940 /* Constant metadata values to be updated */
7941 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7942 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7943
7944 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7945 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7946
7947 int32_t hotPixelMap[2];
7948 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7949
7950 // CDS
7951 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7952 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7953 }
7954
Thierry Strudel04e026f2016-10-10 11:27:36 -07007955 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7956 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007957 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007958 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7959 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7960 } else {
7961 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7962 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007963
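// Track HDR on/off transitions in mCurrFeatureState so toggles show up in
// the profiling logs.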
7964 if(fwk_hdr != curr_hdr_state) {
7965 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7966 if(fwk_hdr)
7967 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7968 else
7969 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7970 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007971 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7972 }
7973
Thierry Strudel54dc9782017-02-15 12:12:10 -08007974 //binning correction
7975 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7976 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7977 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7978 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7979 }
7980
Thierry Strudel04e026f2016-10-10 11:27:36 -07007981 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007982 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007983 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7984 int8_t is_ir_on = 0;
7985
7986 (fwk_ir > 0) ? (is_ir_on = 1) : (is_ir_on = 0) ;
7987 if(is_ir_on != curr_ir_state) {
7988 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7989 if(is_ir_on)
7990 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7991 else
7992 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7993 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007994 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007995 }
7996
Thierry Strudel269c81a2016-10-12 12:13:59 -07007997 // AEC SPEED
7998 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7999 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
8000 }
8001
8002 // AWB SPEED
8003 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
8004 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
8005 }
8006
Thierry Strudel3d639192016-09-09 11:52:26 -07008007 // TNR
8008 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
8009 uint8_t tnr_enable = tnr->denoise_enable;
8010 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08008011 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
8012 int8_t is_tnr_on = 0;
8013
8014 (tnr_enable > 0) ? (is_tnr_on = 1) : (is_tnr_on = 0);
8015 if(is_tnr_on != curr_tnr_state) {
8016 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
8017 if(is_tnr_on)
8018 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
8019 else
8020 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
8021 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008022
8023 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
8024 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
8025 }
8026
8027 // Reprocess crop data
8028 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
8029 uint8_t cnt = crop_data->num_of_streams;
8030 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
8031 // mm-qcamera-daemon only posts crop_data for streams
8032 // not linked to pproc, so the absence of valid crop metadata
8033 // is not necessarily an error case.
8034 LOGD("No valid crop metadata entries");
8035 } else {
8036 uint32_t reproc_stream_id;
8037 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8038 LOGD("No reprocessible stream found, ignore crop data");
8039 } else {
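// Publish crop and ROI-map info only for the reprocessible output stream;
// when internal reprocessing has already run, report the full input
// dimensions instead of the HAL crop.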
8040 int rc = NO_ERROR;
8041 Vector<int32_t> roi_map;
8042 int32_t *crop = new int32_t[cnt*4];
8043 if (NULL == crop) {
8044 rc = NO_MEMORY;
8045 }
8046 if (NO_ERROR == rc) {
8047 int32_t streams_found = 0;
8048 for (size_t i = 0; i < cnt; i++) {
8049 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8050 if (pprocDone) {
8051 // HAL already does internal reprocessing,
8052 // either via reprocessing before JPEG encoding,
8053 // or offline postprocessing for pproc bypass case.
8054 crop[0] = 0;
8055 crop[1] = 0;
8056 crop[2] = mInputStreamInfo.dim.width;
8057 crop[3] = mInputStreamInfo.dim.height;
8058 } else {
8059 crop[0] = crop_data->crop_info[i].crop.left;
8060 crop[1] = crop_data->crop_info[i].crop.top;
8061 crop[2] = crop_data->crop_info[i].crop.width;
8062 crop[3] = crop_data->crop_info[i].crop.height;
8063 }
8064 roi_map.add(crop_data->crop_info[i].roi_map.left);
8065 roi_map.add(crop_data->crop_info[i].roi_map.top);
8066 roi_map.add(crop_data->crop_info[i].roi_map.width);
8067 roi_map.add(crop_data->crop_info[i].roi_map.height);
8068 streams_found++;
8069 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8070 crop[0], crop[1], crop[2], crop[3]);
8071 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8072 crop_data->crop_info[i].roi_map.left,
8073 crop_data->crop_info[i].roi_map.top,
8074 crop_data->crop_info[i].roi_map.width,
8075 crop_data->crop_info[i].roi_map.height);
8076 break;
8077
8078 }
8079 }
8080 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8081 &streams_found, 1);
8082 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8083 crop, (size_t)(streams_found * 4));
8084 if (roi_map.array()) {
8085 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8086 roi_map.array(), roi_map.size());
8087 }
8088 }
8089 if (crop) {
8090 delete [] crop;
8091 }
8092 }
8093 }
8094 }
8095
8096 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8097 // Regardless of whether CAC is supported, CTS expects the CAC result to be
8098 // non-NULL, so hardcode the CAC result to OFF mode.
8099 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8100 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8101 } else {
8102 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8103 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8104 *cacMode);
8105 if (NAME_NOT_FOUND != val) {
8106 uint8_t resultCacMode = (uint8_t)val;
8107 // Check whether the CAC result from the callback matches the framework-set
8108 // CAC mode. If not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008109 if (pendingRequest.fwkCacMode != resultCacMode) {
8110 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008111 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008112 //Check if CAC is disabled by property
8113 if (m_cacModeDisabled) {
8114 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8115 }
8116
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008117 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008118 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8119 } else {
8120 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8121 }
8122 }
8123 }
8124
8125 // Post blob of cam_cds_data through vendor tag.
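// Only the CDS enable flag of the reprocessible stream is forwarded,
// repacked into a single-stream cam_cds_data_t override.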
8126 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8127 uint8_t cnt = cdsInfo->num_of_streams;
8128 cam_cds_data_t cdsDataOverride;
8129 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8130 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8131 cdsDataOverride.num_of_streams = 1;
8132 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8133 uint32_t reproc_stream_id;
8134 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8135 LOGD("No reprocessible stream found, ignore cds data");
8136 } else {
8137 for (size_t i = 0; i < cnt; i++) {
8138 if (cdsInfo->cds_info[i].stream_id ==
8139 reproc_stream_id) {
8140 cdsDataOverride.cds_info[0].cds_enable =
8141 cdsInfo->cds_info[i].cds_enable;
8142 break;
8143 }
8144 }
8145 }
8146 } else {
8147 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8148 }
8149 camMetadata.update(QCAMERA3_CDS_INFO,
8150 (uint8_t *)&cdsDataOverride,
8151 sizeof(cam_cds_data_t));
8152 }
8153
8154 // Ldaf calibration data
8155 if (!mLdafCalibExist) {
8156 IF_META_AVAILABLE(uint32_t, ldafCalib,
8157 CAM_INTF_META_LDAF_EXIF, metadata) {
8158 mLdafCalibExist = true;
8159 mLdafCalib[0] = ldafCalib[0];
8160 mLdafCalib[1] = ldafCalib[1];
8161 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8162 ldafCalib[0], ldafCalib[1]);
8163 }
8164 }
8165
Thierry Strudel54dc9782017-02-15 12:12:10 -08008166 // EXIF debug data through vendor tag
8167 /*
8168 * Mobicat Mask can assume 3 values:
8169 * 1 refers to Mobicat data,
8170 * 2 refers to Stats Debug and Exif Debug Data
8171 * 3 refers to Mobicat and Stats Debug Data
8172 * We want to make sure that we are sending Exif debug data
8173 * only when Mobicat Mask is 2.
8174 */
8175 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8176 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8177 (uint8_t *)(void *)mExifParams.debug_params,
8178 sizeof(mm_jpeg_debug_exif_params_t));
8179 }
8180
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008181 // Reprocess and DDM debug data through vendor tag
8182 cam_reprocess_info_t repro_info;
8183 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008184 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8185 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008186 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008187 }
8188 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8189 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008190 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008191 }
8192 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8193 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008194 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008195 }
8196 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8197 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008198 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008199 }
8200 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8201 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008202 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008203 }
8204 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008205 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008206 }
8207 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8208 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008209 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008210 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008211 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8212 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8213 }
8214 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8215 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8216 }
8217 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8218 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008219
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008220 // INSTANT AEC MODE
8221 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8222 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8223 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8224 }
8225
Shuzhen Wange763e802016-03-31 10:24:29 -07008226 // AF scene change
8227 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8228 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8229 }
8230
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008231 // Enable ZSL
8232 if (enableZsl != nullptr) {
8233 uint8_t value = *enableZsl ?
8234 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8235 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8236 }
8237
Xu Han821ea9c2017-05-23 09:00:40 -07008238 // OIS Data
8239 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
Xu Han821ea9c2017-05-23 09:00:40 -07008240 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8241 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8242 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8243 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008244 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8245 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8246 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8247 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008248 }
8249
Thierry Strudel3d639192016-09-09 11:52:26 -07008250 resultMetadata = camMetadata.release();
8251 return resultMetadata;
8252}
8253
8254/*===========================================================================
8255 * FUNCTION : saveExifParams
8256 *
8257 * DESCRIPTION: Save EXIF debug parameters from the metadata callback into mExifParams
8258 *
8259 * PARAMETERS :
8260 * @metadata : metadata information from callback
8261 *
8262 * RETURN : none
8263 *
8264 *==========================================================================*/
8265void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8266{
8267 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8268 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8269 if (mExifParams.debug_params) {
8270 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8271 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8272 }
8273 }
8274 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8275 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8276 if (mExifParams.debug_params) {
8277 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8278 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8279 }
8280 }
8281 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8282 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8283 if (mExifParams.debug_params) {
8284 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8285 mExifParams.debug_params->af_debug_params_valid = TRUE;
8286 }
8287 }
8288 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8289 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8290 if (mExifParams.debug_params) {
8291 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8292 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8293 }
8294 }
8295 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8296 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8297 if (mExifParams.debug_params) {
8298 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8299 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8300 }
8301 }
8302 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8303 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8304 if (mExifParams.debug_params) {
8305 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8306 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8307 }
8308 }
8309 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8310 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8311 if (mExifParams.debug_params) {
8312 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8313 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8314 }
8315 }
8316 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8317 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8318 if (mExifParams.debug_params) {
8319 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8320 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8321 }
8322 }
8323}
8324
8325/*===========================================================================
8326 * FUNCTION : get3AExifParams
8327 *
8328 * DESCRIPTION:
8329 *
8330 * PARAMETERS : none
8331 *
8332 *
8333 * RETURN : mm_jpeg_exif_params_t
8334 *
8335 *==========================================================================*/
8336mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8337{
8338 return mExifParams;
8339}
8340
8341/*===========================================================================
8342 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8343 *
8344 * DESCRIPTION:
8345 *
8346 * PARAMETERS :
8347 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008348 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8349 * urgent metadata in a batch. Always true for
8350 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008351 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008352 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8353 * i.e. even though it doesn't map to a valid partial
8354 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008355 * RETURN : camera_metadata_t*
8356 * metadata in a format specified by fwk
8357 *==========================================================================*/
8358camera_metadata_t*
8359QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008360 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008361 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008362{
8363 CameraMetadata camMetadata;
8364 camera_metadata_t *resultMetadata;
8365
Shuzhen Wang485e2442017-08-02 12:21:08 -07008366 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008367 /* In batch mode, use empty metadata if this is not the last in batch
8368 */
8369 resultMetadata = allocate_camera_metadata(0, 0);
8370 return resultMetadata;
8371 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008372
8373 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8374 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8375 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8376 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8377 }
8378
8379 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8380 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8381 &aecTrigger->trigger, 1);
8382 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8383 &aecTrigger->trigger_id, 1);
8384 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8385 aecTrigger->trigger);
8386 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8387 aecTrigger->trigger_id);
8388 }
8389
8390 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8391 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8392 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8393 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8394 }
8395
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008396 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8397 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8398 if (NAME_NOT_FOUND != val) {
8399 uint8_t fwkAfMode = (uint8_t)val;
8400 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8401 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8402 } else {
8403 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8404 val);
8405 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008406 }
8407
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008408 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8409 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8410 af_trigger->trigger);
8411 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8412 af_trigger->trigger_id);
8413
8414 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8415 mAfTrigger = *af_trigger;
8416 uint32_t fwk_AfState = (uint32_t) *afState;
8417
8418 // If this is the result for a new trigger, check if there is new early
8419 // af state. If there is, use the last af state for all results
8420 // preceding current partial frame number.
8421 for (auto & pendingRequest : mPendingRequestsList) {
8422 if (pendingRequest.frame_number < frame_number) {
8423 pendingRequest.focusStateValid = true;
8424 pendingRequest.focusState = fwk_AfState;
8425 } else if (pendingRequest.frame_number == frame_number) {
8426 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8427 // Check if early AF state for trigger exists. If yes, send AF state as
8428 // partial result for better latency.
8429 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8430 pendingRequest.focusStateSent = true;
8431 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8432 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8433 frame_number, fwkEarlyAfState);
8434 }
8435 }
8436 }
8437 }
8438 }
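// Always report the AF trigger from the cached copy so frames whose metadata
// carries no trigger entry still return the last known trigger and trigger id.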
8439 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8440 &mAfTrigger.trigger, 1);
8441 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8442
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008443 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8444 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008445 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008446 int32_t afRegions[REGIONS_TUPLE_COUNT];
8447 // Adjust the AF region from the sensor output coordinate system to the
8448 // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008449 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8450 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008451
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008452 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008453 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8454 REGIONS_TUPLE_COUNT);
8455 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8456 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008457 hAfRect.left, hAfRect.top, hAfRect.width,
8458 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008459 }
8460
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008461 // AF region confidence
8462 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8463 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8464 }
8465
Thierry Strudel3d639192016-09-09 11:52:26 -07008466 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8467 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8468 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8469 if (NAME_NOT_FOUND != val) {
8470 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8471 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8472 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8473 } else {
8474 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8475 }
8476 }
8477
8478 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8479 uint32_t aeMode = CAM_AE_MODE_MAX;
8480 int32_t flashMode = CAM_FLASH_MODE_MAX;
8481 int32_t redeye = -1;
8482 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8483 aeMode = *pAeMode;
8484 }
8485 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8486 flashMode = *pFlashMode;
8487 }
8488 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8489 redeye = *pRedeye;
8490 }
8491
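// Derive ANDROID_CONTROL_AE_MODE from the redeye, LED-flash and AEC mode
// values: redeye reduction takes precedence, then auto/on flash, then plain
// AE on/off or the experimental external-flash mode.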
8492 if (1 == redeye) {
8493 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8494 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8495 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8496 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8497 flashMode);
8498 if (NAME_NOT_FOUND != val) {
8499 fwk_aeMode = (uint8_t)val;
8500 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8501 } else {
8502 LOGE("Unsupported flash mode %d", flashMode);
8503 }
8504 } else if (aeMode == CAM_AE_MODE_ON) {
8505 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8506 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8507 } else if (aeMode == CAM_AE_MODE_OFF) {
8508 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8509 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008510 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8511 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8512 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008513 } else {
8514 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8515 "flashMode:%d, aeMode:%u!!!",
8516 redeye, flashMode, aeMode);
8517 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008518 if (mInstantAEC) {
8519 // Increment the frame index count until the bound is reached for instant AEC.
8520 mInstantAecFrameIdxCount++;
8521 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8522 CAM_INTF_META_AEC_INFO, metadata) {
8523 LOGH("ae_params->settled = %d",ae_params->settled);
8524 // If AEC has settled, or the number of frames has reached the bound,
8525 // reset instant AEC.
8526 if (ae_params->settled ||
8527 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8528 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8529 mInstantAEC = false;
8530 mResetInstantAEC = true;
8531 mInstantAecFrameIdxCount = 0;
8532 }
8533 }
8534 }
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07008535
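// Laser AF time-of-flight: report the measured distance only when the sensor
// flags the sample as confident; otherwise publish -1.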
8536 IF_META_AVAILABLE(int32_t, af_tof_confidence,
8537 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8538 IF_META_AVAILABLE(int32_t, af_tof_distance,
8539 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8540 int32_t fwk_af_tof_confidence = *af_tof_confidence;
8541 int32_t fwk_af_tof_distance = *af_tof_distance;
8542 if (fwk_af_tof_confidence == 1) {
8543 mSceneDistance = fwk_af_tof_distance;
8544 } else {
8545 mSceneDistance = -1;
8546 }
8547 LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8548 fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8549 }
8550 }
8551 camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8552
Thierry Strudel3d639192016-09-09 11:52:26 -07008553 resultMetadata = camMetadata.release();
8554 return resultMetadata;
8555}
8556
8557/*===========================================================================
8558 * FUNCTION : dumpMetadataToFile
8559 *
8560 * DESCRIPTION: Dumps tuning metadata to file system
8561 *
8562 * PARAMETERS :
8563 * @meta : tuning metadata
8564 * @dumpFrameCount : current dump frame count
8565 * @enabled : Enable mask
8566 *
8567 *==========================================================================*/
8568void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8569 uint32_t &dumpFrameCount,
8570 bool enabled,
8571 const char *type,
8572 uint32_t frameNumber)
8573{
8574 //Some sanity checks
8575 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8576 LOGE("Tuning sensor data size bigger than expected %d: %d",
8577 meta.tuning_sensor_data_size,
8578 TUNING_SENSOR_DATA_MAX);
8579 return;
8580 }
8581
8582 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8583 LOGE("Tuning VFE data size bigger than expected %d: %d",
8584 meta.tuning_vfe_data_size,
8585 TUNING_VFE_DATA_MAX);
8586 return;
8587 }
8588
8589 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8590 LOGE("Tuning CPP data size bigger than expected %d: %d",
8591 meta.tuning_cpp_data_size,
8592 TUNING_CPP_DATA_MAX);
8593 return;
8594 }
8595
8596 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8597 LOGE("Tuning CAC data size bigger than expected %d: %d",
8598 meta.tuning_cac_data_size,
8599 TUNING_CAC_DATA_MAX);
8600 return;
8601 }
8602 //
8603
8604 if(enabled){
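// The dump file name encodes the dump time, dump count, type and frame
// number; its contents mirror the tuning blob layout: version, the five size
// fields, then the sensor/VFE/CPP/CAC payload sections.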
8605 char timeBuf[FILENAME_MAX];
8606 char buf[FILENAME_MAX];
8607 memset(buf, 0, sizeof(buf));
8608 memset(timeBuf, 0, sizeof(timeBuf));
8609 time_t current_time;
8610 struct tm * timeinfo;
8611 time (&current_time);
8612 timeinfo = localtime (&current_time);
8613 if (timeinfo != NULL) {
8614 strftime (timeBuf, sizeof(timeBuf),
8615 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8616 }
8617 String8 filePath(timeBuf);
8618 snprintf(buf,
8619 sizeof(buf),
8620 "%dm_%s_%d.bin",
8621 dumpFrameCount,
8622 type,
8623 frameNumber);
8624 filePath.append(buf);
8625 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8626 if (file_fd >= 0) {
8627 ssize_t written_len = 0;
8628 meta.tuning_data_version = TUNING_DATA_VERSION;
8629 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8630 written_len += write(file_fd, data, sizeof(uint32_t));
8631 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8632 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8633 written_len += write(file_fd, data, sizeof(uint32_t));
8634 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8635 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8636 written_len += write(file_fd, data, sizeof(uint32_t));
8637 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8638 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8639 written_len += write(file_fd, data, sizeof(uint32_t));
8640 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8641 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8642 written_len += write(file_fd, data, sizeof(uint32_t));
8643 meta.tuning_mod3_data_size = 0;
8644 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8645 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8646 written_len += write(file_fd, data, sizeof(uint32_t));
8647 size_t total_size = meta.tuning_sensor_data_size;
8648 data = (void *)((uint8_t *)&meta.data);
8649 written_len += write(file_fd, data, total_size);
8650 total_size = meta.tuning_vfe_data_size;
8651 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8652 written_len += write(file_fd, data, total_size);
8653 total_size = meta.tuning_cpp_data_size;
8654 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8655 written_len += write(file_fd, data, total_size);
8656 total_size = meta.tuning_cac_data_size;
8657 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8658 written_len += write(file_fd, data, total_size);
8659 close(file_fd);
8660 } else {
8661 LOGE("fail to open file for metadata dumping");
8662 }
8663 }
8664}
8665
8666/*===========================================================================
8667 * FUNCTION : cleanAndSortStreamInfo
8668 *
8669 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8670 * and sort them such that raw stream is at the end of the list
8671 * This is a workaround for camera daemon constraint.
8672 *
8673 * PARAMETERS : None
8674 *
8675 *==========================================================================*/
8676void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8677{
8678 List<stream_info_t *> newStreamInfo;
8679
8680 /*clean up invalid streams*/
8681 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8682 it != mStreamInfo.end();) {
8683 if(((*it)->status) == INVALID){
8684 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8685 delete channel;
8686 free(*it);
8687 it = mStreamInfo.erase(it);
8688 } else {
8689 it++;
8690 }
8691 }
8692
8693 // Move preview/video/callback/snapshot streams into newList
8694 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8695 it != mStreamInfo.end();) {
8696 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8697 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8698 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8699 newStreamInfo.push_back(*it);
8700 it = mStreamInfo.erase(it);
8701 } else
8702 it++;
8703 }
8704 // Move raw streams into newList
8705 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8706 it != mStreamInfo.end();) {
8707 newStreamInfo.push_back(*it);
8708 it = mStreamInfo.erase(it);
8709 }
8710
8711 mStreamInfo = newStreamInfo;
Chien-Yu Chen3d836272017-09-20 11:10:21 -07008712
8713 // Make sure that stream IDs are unique.
8714 uint32_t id = 0;
8715 for (auto streamInfo : mStreamInfo) {
8716 streamInfo->id = id++;
8717 }
8718
Thierry Strudel3d639192016-09-09 11:52:26 -07008719}
8720
8721/*===========================================================================
8722 * FUNCTION : extractJpegMetadata
8723 *
8724 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8725 * JPEG metadata is cached in HAL, and return as part of capture
8726 * result when metadata is returned from camera daemon.
8727 *
8728 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8729 * @request: capture request
8730 *
8731 *==========================================================================*/
8732void QCamera3HardwareInterface::extractJpegMetadata(
8733 CameraMetadata& jpegMetadata,
8734 const camera3_capture_request_t *request)
8735{
8736 CameraMetadata frame_settings;
8737 frame_settings = request->settings;
8738
8739 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8740 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8741 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8742 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8743
8744 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8745 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8746 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8747 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8748
8749 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8750 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8751 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8752 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8753
8754 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8755 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8756 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8757 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8758
8759 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8760 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8761 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8762 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8763
8764 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8765 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8766 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8767 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8768
8769 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8770 int32_t thumbnail_size[2];
8771 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8772 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8773 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8774 int32_t orientation =
8775 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008776 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008777 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8778 int32_t temp;
8779 temp = thumbnail_size[0];
8780 thumbnail_size[0] = thumbnail_size[1];
8781 thumbnail_size[1] = temp;
8782 }
8783 }
8784 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8785 thumbnail_size,
8786 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8787 }
8788
8789}
8790
8791/*===========================================================================
8792 * FUNCTION : convertToRegions
8793 *
8794 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8795 *
8796 * PARAMETERS :
8797 * @rect : cam_rect_t struct to convert
8798 * @region : int32_t destination array
8799 * @weight : if we are converting from cam_area_t, weight is valid
8800 * else weight = -1
8801 *
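 * Example: rect = {left 100, top 200, width 50, height 60} with weight 1
 * yields left/top/right/bottom/weight = 100/200/150/260/1
 *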
8802 *==========================================================================*/
8803void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8804 int32_t *region, int weight)
8805{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008806 region[FACE_LEFT] = rect.left;
8807 region[FACE_TOP] = rect.top;
8808 region[FACE_RIGHT] = rect.left + rect.width;
8809 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008810 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008811 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008812 }
8813}
8814
8815/*===========================================================================
8816 * FUNCTION : convertFromRegions
8817 *
8818 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8819 *
8820 * PARAMETERS :
8821 * @roi : cam_area_t destination to fill
8822 * @frame_settings : capture request settings to read the region from
8823 * @tag : metadata tag holding the region as
8824 * [xmin, ymin, xmax, ymax, weight]
8825 *
8826 *==========================================================================*/
8827void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008828 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008829{
Thierry Strudel3d639192016-09-09 11:52:26 -07008830 int32_t x_min = frame_settings.find(tag).data.i32[0];
8831 int32_t y_min = frame_settings.find(tag).data.i32[1];
8832 int32_t x_max = frame_settings.find(tag).data.i32[2];
8833 int32_t y_max = frame_settings.find(tag).data.i32[3];
8834 roi.weight = frame_settings.find(tag).data.i32[4];
8835 roi.rect.left = x_min;
8836 roi.rect.top = y_min;
8837 roi.rect.width = x_max - x_min;
8838 roi.rect.height = y_max - y_min;
8839}
8840
8841/*===========================================================================
8842 * FUNCTION : resetIfNeededROI
8843 *
8844 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8845 * crop region
8846 *
8847 * PARAMETERS :
8848 * @roi : cam_area_t struct to resize
8849 * @scalerCropRegion : cam_crop_region_t region to compare against
8850 *
8851 *
8852 *==========================================================================*/
8853bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8854 const cam_crop_region_t* scalerCropRegion)
8855{
8856 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8857 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8858 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8859 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8860
8861 /* According to the spec, weight = 0 indicates that the ROI should be disabled.
8862 * Without this check, the validation below (whether the ROI lies inside the
8863 * scaler crop region) would fail to reset the ROI, and the algorithm would
8864 * keep using a stale ROI window.
8865 */
8866 if (roi->weight == 0) {
8867 return true;
8868 }
8869
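// If the ROI lies entirely outside the scaler crop region, report failure;
// otherwise clamp the ROI to the crop boundaries.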
8870 if ((roi_x_max < scalerCropRegion->left) ||
8871 // right edge of roi window is left of scaler crop's left edge
8872 (roi_y_max < scalerCropRegion->top) ||
8873 // bottom edge of roi window is above scaler crop's top edge
8874 (roi->rect.left > crop_x_max) ||
8875 // left edge of roi window is right of scaler crop's right edge
8876 (roi->rect.top > crop_y_max)){
8877 // top edge of roi window is below scaler crop's bottom edge
8878 return false;
8879 }
8880 if (roi->rect.left < scalerCropRegion->left) {
8881 roi->rect.left = scalerCropRegion->left;
8882 }
8883 if (roi->rect.top < scalerCropRegion->top) {
8884 roi->rect.top = scalerCropRegion->top;
8885 }
8886 if (roi_x_max > crop_x_max) {
8887 roi_x_max = crop_x_max;
8888 }
8889 if (roi_y_max > crop_y_max) {
8890 roi_y_max = crop_y_max;
8891 }
8892 roi->rect.width = roi_x_max - roi->rect.left;
8893 roi->rect.height = roi_y_max - roi->rect.top;
8894 return true;
8895}
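/* Illustrative sketch (hypothetical crop region, not part of the HAL build):
 * expected clamping behavior of resetIfNeededROI().
 *
 *   cam_crop_region_t crop;
 *   crop.left = 0; crop.top = 0; crop.width = 2000; crop.height = 1500;
 *   cam_area_t roi;
 *   roi.rect.left = 1800; roi.rect.top = 1400;
 *   roi.rect.width = 400; roi.rect.height = 300;
 *   roi.weight = 1;
 *   bool ok = resetIfNeededROI(&roi, &crop);
 *   // ok == true, roi.rect clamped to {1800, 1400, 200, 100}
 *
 * A roi lying completely outside the crop region returns false so the caller
 * can discard it; weight == 0 returns true immediately since such a roi is
 * meant to be disabled.
 */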
8896
8897/*===========================================================================
8898 * FUNCTION : convertLandmarks
8899 *
8900 * DESCRIPTION: helper method to extract the landmarks from face detection info
8901 *
8902 * PARAMETERS :
8903 * @landmark_data : input landmark data to be converted
8904 * @landmarks : int32_t destination array
8905 *
8906 *
8907 *==========================================================================*/
8908void QCamera3HardwareInterface::convertLandmarks(
8909 cam_face_landmarks_info_t landmark_data,
8910 int32_t *landmarks)
8911{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008912 if (landmark_data.is_left_eye_valid) {
8913 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8914 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8915 } else {
8916 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8917 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8918 }
8919
8920 if (landmark_data.is_right_eye_valid) {
8921 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8922 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8923 } else {
8924 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8925 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8926 }
8927
8928 if (landmark_data.is_mouth_valid) {
8929 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8930 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8931 } else {
8932 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8933 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8934 }
8935}
8936
8937/*===========================================================================
8938 * FUNCTION : setInvalidLandmarks
8939 *
8940 * DESCRIPTION: helper method to set invalid landmarks
8941 *
8942 * PARAMETERS :
8943 * @landmarks : int32_t destination array
8944 *
8945 *
8946 *==========================================================================*/
8947void QCamera3HardwareInterface::setInvalidLandmarks(
8948 int32_t *landmarks)
8949{
8950 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8951 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8952 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8953 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8954 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8955 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008956}
8957
8958#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008959
8960/*===========================================================================
8961 * FUNCTION : getCapabilities
8962 *
8963 * DESCRIPTION: query camera capability from back-end
8964 *
8965 * PARAMETERS :
8966 * @ops : mm-interface ops structure
8967 * @cam_handle : camera handle for which we need capability
8968 *
8969 * RETURN : ptr type of capability structure
8970 * capability for success
8971 * NULL for failure
8972 *==========================================================================*/
8973cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8974 uint32_t cam_handle)
8975{
8976 int rc = NO_ERROR;
8977 QCamera3HeapMemory *capabilityHeap = NULL;
8978 cam_capability_t *cap_ptr = NULL;
8979
8980 if (ops == NULL) {
8981 LOGE("Invalid arguments");
8982 return NULL;
8983 }
8984
8985 capabilityHeap = new QCamera3HeapMemory(1);
8986 if (capabilityHeap == NULL) {
8987 LOGE("creation of capabilityHeap failed");
8988 return NULL;
8989 }
8990
8991 /* Allocate memory for capability buffer */
8992 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8993 if(rc != OK) {
8994 LOGE("No memory for capability");
8995 goto allocate_failed;
8996 }
8997
8998 /* Map memory for capability buffer */
8999 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
9000
9001 rc = ops->map_buf(cam_handle,
9002 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
9003 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
9004 if(rc < 0) {
9005 LOGE("failed to map capability buffer");
9006 rc = FAILED_TRANSACTION;
9007 goto map_failed;
9008 }
9009
9010 /* Query Capability */
9011 rc = ops->query_capability(cam_handle);
9012 if(rc < 0) {
9013 LOGE("failed to query capability");
9014 rc = FAILED_TRANSACTION;
9015 goto query_failed;
9016 }
9017
9018 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
9019 if (cap_ptr == NULL) {
9020 LOGE("out of memory");
9021 rc = NO_MEMORY;
9022 goto query_failed;
9023 }
9024
9025 memset(cap_ptr, 0, sizeof(cam_capability_t));
9026 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
9027
9028 int index;
9029 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
9030 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
9031 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
9032 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
9033 }
9034
9035query_failed:
9036 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9037map_failed:
9038 capabilityHeap->deallocate();
9039allocate_failed:
9040 delete capabilityHeap;
9041
9042 if (rc != NO_ERROR) {
9043 return NULL;
9044 } else {
9045 return cap_ptr;
9046 }
9047}
9048
Thierry Strudel3d639192016-09-09 11:52:26 -07009049/*===========================================================================
9050 * FUNCTION : initCapabilities
9051 *
9052 * DESCRIPTION: initialize camera capabilities in static data struct
9053 *
9054 * PARAMETERS :
9055 * @cameraId : camera Id
9056 *
9057 * RETURN : int32_t type of status
9058 * NO_ERROR -- success
9059 * non-zero failure code
9060 *==========================================================================*/
9061int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9062{
9063 int rc = 0;
9064 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009065 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07009066
9067 rc = camera_open((uint8_t)cameraId, &cameraHandle);
9068 if (rc) {
9069 LOGE("camera_open failed. rc = %d", rc);
9070 goto open_failed;
9071 }
9072 if (!cameraHandle) {
9073 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9074 goto open_failed;
9075 }
9076
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009077 handle = get_main_camera_handle(cameraHandle->camera_handle);
9078 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9079 if (gCamCapability[cameraId] == NULL) {
9080 rc = FAILED_TRANSACTION;
9081 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07009082 }
9083
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009084 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009085 if (is_dual_camera_by_idx(cameraId)) {
9086 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9087 gCamCapability[cameraId]->aux_cam_cap =
9088 getCapabilities(cameraHandle->ops, handle);
9089 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9090 rc = FAILED_TRANSACTION;
9091 free(gCamCapability[cameraId]);
9092 goto failed_op;
9093 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009094
9095 // Copy the main camera capability to main_cam_cap struct
9096 gCamCapability[cameraId]->main_cam_cap =
9097 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9098 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9099 LOGE("out of memory");
9100 rc = NO_MEMORY;
9101 goto failed_op;
9102 }
9103 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9104 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009105 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009106failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009107 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9108 cameraHandle = NULL;
9109open_failed:
9110 return rc;
9111}
9112
9113/*==========================================================================
9114 * FUNCTION : get3AVersion
9115 *
9116 * DESCRIPTION: get the Q3A S/W version
9117 *
9118 * PARAMETERS :
9119 * @sw_version: Reference of Q3A structure which will hold version info upon
9120 * return
9121 *
9122 * RETURN : None
9123 *
9124 *==========================================================================*/
9125void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9126{
9127 if(gCamCapability[mCameraId])
9128 sw_version = gCamCapability[mCameraId]->q3a_version;
9129 else
9130 LOGE("Capability structure NULL!");
9131}
9132
9133
9134/*===========================================================================
9135 * FUNCTION : initParameters
9136 *
9137 * DESCRIPTION: initialize camera parameters
9138 *
9139 * PARAMETERS :
9140 *
9141 * RETURN : int32_t type of status
9142 * NO_ERROR -- success
9143 * non-zero failure code
9144 *==========================================================================*/
9145int QCamera3HardwareInterface::initParameters()
9146{
9147 int rc = 0;
9148
9149 //Allocate Set Param Buffer
9150 mParamHeap = new QCamera3HeapMemory(1);
9151 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9152 if(rc != OK) {
9153 rc = NO_MEMORY;
9154 LOGE("Failed to allocate SETPARM Heap memory");
9155 delete mParamHeap;
9156 mParamHeap = NULL;
9157 return rc;
9158 }
9159
9160 //Map memory for parameters buffer
9161 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9162 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9163 mParamHeap->getFd(0),
9164 sizeof(metadata_buffer_t),
9165 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9166 if(rc < 0) {
9167 LOGE("failed to map SETPARM buffer");
9168 rc = FAILED_TRANSACTION;
9169 mParamHeap->deallocate();
9170 delete mParamHeap;
9171 mParamHeap = NULL;
9172 return rc;
9173 }
9174
9175 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9176
9177 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9178 return rc;
9179}
9180
9181/*===========================================================================
9182 * FUNCTION : deinitParameters
9183 *
9184 * DESCRIPTION: de-initialize camera parameters
9185 *
9186 * PARAMETERS :
9187 *
9188 * RETURN : NONE
9189 *==========================================================================*/
9190void QCamera3HardwareInterface::deinitParameters()
9191{
9192 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9193 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9194
9195 mParamHeap->deallocate();
9196 delete mParamHeap;
9197 mParamHeap = NULL;
9198
9199 mParameters = NULL;
9200
9201 free(mPrevParameters);
9202 mPrevParameters = NULL;
9203}
9204
9205/*===========================================================================
9206 * FUNCTION : calcMaxJpegSize
9207 *
9208 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9209 *
9210 * PARAMETERS :
9211 * @camera_id : camera Id
9212 * RETURN : max_jpeg_size
9213 *==========================================================================*/
9214size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9215{
9216 size_t max_jpeg_size = 0;
9217 size_t temp_width, temp_height;
9218 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9219 MAX_SIZES_CNT);
9220 for (size_t i = 0; i < count; i++) {
9221 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9222 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9223 if (temp_width * temp_height > max_jpeg_size ) {
9224 max_jpeg_size = temp_width * temp_height;
9225 }
9226 }
9227 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9228 return max_jpeg_size;
9229}
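/* Illustrative arithmetic (hypothetical sensor, not a guaranteed value): if the
 * largest picture size were 5312x2988, calcMaxJpegSize() would return
 *
 *   5312 * 2988 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *   = 23808384 bytes + the trailing blob descriptor
 *
 * i.e. a YUV420-sized worst case plus the JPEG blob header the camera3
 * framework expects at the end of the buffer.
 */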
9230
9231/*===========================================================================
9232 * FUNCTION : getMaxRawSize
9233 *
9234 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9235 *
9236 * PARAMETERS :
9237 * @camera_id : camera Id
9238 * RETURN : Largest supported Raw Dimension
9239 *==========================================================================*/
9240cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9241{
9242 int max_width = 0;
9243 cam_dimension_t maxRawSize;
9244
9245 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9246 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9247 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9248 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9249 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9250 }
9251 }
9252 return maxRawSize;
9253}
9254
9255
9256/*===========================================================================
9257 * FUNCTION : calcMaxJpegDim
9258 *
9259 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9260 *
9261 * PARAMETERS :
9262 *
9263 * RETURN : max_jpeg_dim
9264 *==========================================================================*/
9265cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9266{
9267 cam_dimension_t max_jpeg_dim;
9268 cam_dimension_t curr_jpeg_dim;
9269 max_jpeg_dim.width = 0;
9270 max_jpeg_dim.height = 0;
9271 curr_jpeg_dim.width = 0;
9272 curr_jpeg_dim.height = 0;
9273 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9274 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9275 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9276 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9277 max_jpeg_dim.width * max_jpeg_dim.height ) {
9278 max_jpeg_dim.width = curr_jpeg_dim.width;
9279 max_jpeg_dim.height = curr_jpeg_dim.height;
9280 }
9281 }
9282 return max_jpeg_dim;
9283}
9284
9285/*===========================================================================
9286 * FUNCTION : addStreamConfig
9287 *
9288 * DESCRIPTION: adds the stream configuration to the array
9289 *
9290 * PARAMETERS :
9291 * @available_stream_configs : pointer to stream configuration array
9292 * @scalar_format : scalar format
9293 * @dim : configuration dimension
9294 * @config_type : input or output configuration type
9295 *
9296 * RETURN : NONE
9297 *==========================================================================*/
9298void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9299 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9300{
9301 available_stream_configs.add(scalar_format);
9302 available_stream_configs.add(dim.width);
9303 available_stream_configs.add(dim.height);
9304 available_stream_configs.add(config_type);
9305}
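/* Illustrative sketch: each addStreamConfig() call appends one flattened
 * 4-tuple, so a hypothetical 1920x1080 output entry becomes
 *
 *   { HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 1920, 1080,
 *     ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT }
 *
 * inside ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS.
 */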
9306
9307/*===========================================================================
9308 * FUNCTION : supportBurstCapture
9309 *
9310 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9311 *
9312 * PARAMETERS :
9313 * @cameraId : camera Id
9314 *
9315 * RETURN : true if camera supports BURST_CAPTURE
9316 * false otherwise
9317 *==========================================================================*/
9318bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9319{
9320 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9321 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9322 const int32_t highResWidth = 3264;
9323 const int32_t highResHeight = 2448;
9324
9325 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9326 // Maximum resolution images cannot be captured at >= 10fps
9327 // -> not supporting BURST_CAPTURE
9328 return false;
9329 }
9330
9331 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9332 // Maximum resolution images can be captured at >= 20fps
9333 // --> supporting BURST_CAPTURE
9334 return true;
9335 }
9336
9337 // Find the smallest highRes resolution, or largest resolution if there is none
9338 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9339 MAX_SIZES_CNT);
9340 size_t highRes = 0;
9341 while ((highRes + 1 < totalCnt) &&
9342 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9343 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9344 highResWidth * highResHeight)) {
9345 highRes++;
9346 }
9347 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9348 return true;
9349 } else {
9350 return false;
9351 }
9352}
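/* Illustrative sketch (hypothetical min-duration values): how the bounds above
 * decide BURST_CAPTURE support.
 *
 *   picture_min_duration[0] = 150000000 ns (~6.7 fps at full res) -> false
 *   picture_min_duration[0] =  40000000 ns (25 fps at full res)   -> true
 *
 * Otherwise the smallest size at or above 3264x2448 (~8MP) must support a min
 * duration of <= 50 ms (>= 20 fps) for the capability to be advertised.
 */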
9353
9354/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009355 * FUNCTION : getPDStatIndex
9356 *
9357 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9358 *
9359 * PARAMETERS :
9360 * @caps : camera capabilities
9361 *
9362 * RETURN : int32_t type
9363 * non-negative - on success
9364 * -1 - on failure
9365 *==========================================================================*/
9366int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9367 if (nullptr == caps) {
9368 return -1;
9369 }
9370
9371 uint32_t metaRawCount = caps->meta_raw_channel_count;
9372 int32_t ret = -1;
9373 for (size_t i = 0; i < metaRawCount; i++) {
9374 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9375 ret = i;
9376 break;
9377 }
9378 }
9379
9380 return ret;
9381}
9382
9383/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009384 * FUNCTION : initStaticMetadata
9385 *
9386 * DESCRIPTION: initialize the static metadata
9387 *
9388 * PARAMETERS :
9389 * @cameraId : camera Id
9390 *
9391 * RETURN : int32_t type of status
9392 * 0 -- success
9393 * non-zero failure code
9394 *==========================================================================*/
9395int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9396{
9397 int rc = 0;
9398 CameraMetadata staticInfo;
9399 size_t count = 0;
9400 bool limitedDevice = false;
9401 char prop[PROPERTY_VALUE_MAX];
9402 bool supportBurst = false;
9403
9404 supportBurst = supportBurstCapture(cameraId);
9405
9406 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9407 * guaranteed or if min fps of max resolution is less than 20 fps, it is
9408 * advertised as a limited device */
9409 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9410 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9411 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9412 !supportBurst;
9413
9414 uint8_t supportedHwLvl = limitedDevice ?
9415 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009416#ifndef USE_HAL_3_3
9417 // LEVEL_3 - This device will support level 3.
9418 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9419#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009420 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009421#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009422
9423 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9424 &supportedHwLvl, 1);
9425
9426 bool facingBack = false;
9427 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9428 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9429 facingBack = true;
9430 }
9431 /*HAL 3 only*/
9432 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9433 &gCamCapability[cameraId]->min_focus_distance, 1);
9434
9435 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9436 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9437
9438 /*should be using focal lengths but sensor doesn't provide that info now*/
9439 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9440 &gCamCapability[cameraId]->focal_length,
9441 1);
9442
9443 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9444 gCamCapability[cameraId]->apertures,
9445 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9446
9447 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9448 gCamCapability[cameraId]->filter_densities,
9449 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9450
9451
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009452 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9453 size_t mode_count =
9454 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9455 for (size_t i = 0; i < mode_count; i++) {
9456 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9457 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009458 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009459 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009460
9461 int32_t lens_shading_map_size[] = {
9462 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9463 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9464 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9465 lens_shading_map_size,
9466 sizeof(lens_shading_map_size)/sizeof(int32_t));
9467
9468 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9469 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9470
9471 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9472 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9473
9474 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9475 &gCamCapability[cameraId]->max_frame_duration, 1);
9476
9477 camera_metadata_rational baseGainFactor = {
9478 gCamCapability[cameraId]->base_gain_factor.numerator,
9479 gCamCapability[cameraId]->base_gain_factor.denominator};
9480 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9481 &baseGainFactor, 1);
9482
9483 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9484 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9485
9486 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9487 gCamCapability[cameraId]->pixel_array_size.height};
9488 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9489 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9490
9491 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9492 gCamCapability[cameraId]->active_array_size.top,
9493 gCamCapability[cameraId]->active_array_size.width,
9494 gCamCapability[cameraId]->active_array_size.height};
9495 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9496 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9497
9498 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9499 &gCamCapability[cameraId]->white_level, 1);
9500
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009501 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9502 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9503 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009504 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009505 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009506
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009507#ifndef USE_HAL_3_3
9508 bool hasBlackRegions = false;
9509 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9510 LOGW("black_region_count: %d is bounded to %d",
9511 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9512 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9513 }
9514 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9515 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9516 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9517 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9518 }
9519 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9520 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9521 hasBlackRegions = true;
9522 }
9523#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009524 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9525 &gCamCapability[cameraId]->flash_charge_duration, 1);
9526
9527 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9528 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9529
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009530 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9531 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9532 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009533 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9534 &timestampSource, 1);
9535
Thierry Strudel54dc9782017-02-15 12:12:10 -08009536 //update histogram vendor data
9537 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009538 &gCamCapability[cameraId]->histogram_size, 1);
9539
Thierry Strudel54dc9782017-02-15 12:12:10 -08009540 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009541 &gCamCapability[cameraId]->max_histogram_count, 1);
9542
Shuzhen Wang14415f52016-11-16 18:26:18 -08009543 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9544 //so that the app can request fewer bins than the maximum supported.
9545 std::vector<int32_t> histBins;
9546 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9547 histBins.push_back(maxHistBins);
9548 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9549 (maxHistBins & 0x1) == 0) {
9550 histBins.push_back(maxHistBins >> 1);
9551 maxHistBins >>= 1;
9552 }
9553 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9554 histBins.data(), histBins.size());
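/* Illustrative sketch (hypothetical maximum): with max_histogram_count = 256
 * and MIN_CAM_HISTOGRAM_STATS_SIZE = 32, the loop above advertises
 * {256, 128, 64, 32}; halving stops once the next value would drop below the
 * minimum or the current value is odd.
 */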
9555
Thierry Strudel3d639192016-09-09 11:52:26 -07009556 int32_t sharpness_map_size[] = {
9557 gCamCapability[cameraId]->sharpness_map_size.width,
9558 gCamCapability[cameraId]->sharpness_map_size.height};
9559
9560 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9561 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9562
9563 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9564 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9565
Emilian Peev0f3c3162017-03-15 12:57:46 +00009566 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9567 if (0 <= indexPD) {
9568 // Advertise PD stats data as part of the Depth capabilities
9569 int32_t depthWidth =
9570 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9571 int32_t depthHeight =
9572 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009573 int32_t depthStride =
9574 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009575 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9576 assert(0 < depthSamplesCount);
9577 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9578 &depthSamplesCount, 1);
9579
9580 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9581 depthHeight,
9582 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9583 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9584 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9585 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9586 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9587
9588 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9589 depthHeight, 33333333,
9590 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9591 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9592 depthMinDuration,
9593 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9594
9595 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9596 depthHeight, 0,
9597 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9598 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9599 depthStallDuration,
9600 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9601
9602 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9603 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009604
9605 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9606 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9607 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev835938b2017-08-31 16:59:54 +01009608
9609 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9610 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9611 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
9612
9613 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9614 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9615 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
9616
9617 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9618 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9619 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009620 }
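/* Illustrative arithmetic (hypothetical PDAF stats dimensions): for a
 * raw_meta_dim of 1008x496 the block above would advertise
 *
 *   depthStride       = 1008 * 2              = 2016 bytes per row
 *   depthSamplesCount = (1008 * 496 * 2) / 16 = 62496 samples
 *
 * exposed both as a 1008x496 RAW16 depth stream and as a BLOB stream of
 * depthSamplesCount x 1 entries.
 */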
9621
Thierry Strudel3d639192016-09-09 11:52:26 -07009622 int32_t scalar_formats[] = {
9623 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9624 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9625 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9626 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9627 HAL_PIXEL_FORMAT_RAW10,
9628 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009629 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9630 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9631 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009632
9633 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9634 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9635 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9636 count, MAX_SIZES_CNT, available_processed_sizes);
9637 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9638 available_processed_sizes, count * 2);
9639
9640 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9641 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9642 makeTable(gCamCapability[cameraId]->raw_dim,
9643 count, MAX_SIZES_CNT, available_raw_sizes);
9644 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9645 available_raw_sizes, count * 2);
9646
9647 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9648 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9649 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9650 count, MAX_SIZES_CNT, available_fps_ranges);
9651 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9652 available_fps_ranges, count * 2);
9653
9654 camera_metadata_rational exposureCompensationStep = {
9655 gCamCapability[cameraId]->exp_compensation_step.numerator,
9656 gCamCapability[cameraId]->exp_compensation_step.denominator};
9657 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9658 &exposureCompensationStep, 1);
9659
9660 Vector<uint8_t> availableVstabModes;
9661 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9662 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009663 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009664 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009665 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009666 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009667 count = IS_TYPE_MAX;
9668 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9669 for (size_t i = 0; i < count; i++) {
9670 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9671 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9672 eisSupported = true;
9673 break;
9674 }
9675 }
9676 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009677 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9678 }
9679 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9680 availableVstabModes.array(), availableVstabModes.size());
9681
9682 /*HAL 1 and HAL 3 common*/
9683 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9684 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9685 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009686 // Cap the max zoom to the max preferred value
9687 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009688 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9689 &maxZoom, 1);
9690
9691 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9692 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9693
9694 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9695 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9696 max3aRegions[2] = 0; /* AF not supported */
9697 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9698 max3aRegions, 3);
9699
9700 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9701 memset(prop, 0, sizeof(prop));
9702 property_get("persist.camera.facedetect", prop, "1");
9703 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9704 LOGD("Support face detection mode: %d",
9705 supportedFaceDetectMode);
9706
9707 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009708 /* support mode should be OFF if max number of faces is 0 */
9709 if (maxFaces <= 0) {
9710 supportedFaceDetectMode = 0;
9711 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009712 Vector<uint8_t> availableFaceDetectModes;
9713 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9714 if (supportedFaceDetectMode == 1) {
9715 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9716 } else if (supportedFaceDetectMode == 2) {
9717 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9718 } else if (supportedFaceDetectMode == 3) {
9719 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9720 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9721 } else {
9722 maxFaces = 0;
9723 }
9724 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9725 availableFaceDetectModes.array(),
9726 availableFaceDetectModes.size());
9727 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9728 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009729 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9730 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9731 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009732
9733 int32_t exposureCompensationRange[] = {
9734 gCamCapability[cameraId]->exposure_compensation_min,
9735 gCamCapability[cameraId]->exposure_compensation_max};
9736 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9737 exposureCompensationRange,
9738 sizeof(exposureCompensationRange)/sizeof(int32_t));
9739
9740 uint8_t lensFacing = (facingBack) ?
9741 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9742 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9743
9744 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9745 available_thumbnail_sizes,
9746 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9747
9748 /*all sizes will be combined into this tag*/
9749 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9750 /*android.scaler.availableStreamConfigurations*/
9751 Vector<int32_t> available_stream_configs;
9752 cam_dimension_t active_array_dim;
9753 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9754 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009755
9756 /* Advertise the list of supported input dimensions based on the property below.
9757 By default, only sizes of 5MP (2592x1944) and larger are advertised as inputs.
9758 Note that the setprop resolution format should be WxH,
9759 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9760 To list all supported sizes, set the property to "0x0". */
9761 cam_dimension_t minInputSize = {2592,1944}; //5MP
9762 memset(prop, 0, sizeof(prop));
9763 property_get("persist.camera.input.minsize", prop, "2592x1944");
9764 if (strlen(prop) > 0) {
9765 char *saveptr = NULL;
9766 char *token = strtok_r(prop, "x", &saveptr);
9767 if (token != NULL) {
9768 minInputSize.width = atoi(token);
9769 }
9770 token = strtok_r(NULL, "x", &saveptr);
9771 if (token != NULL) {
9772 minInputSize.height = atoi(token);
9773 }
9774 }
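/* Illustrative sketch (hypothetical property values): how the parsing above
 * affects minInputSize.
 *
 *   persist.camera.input.minsize unset        -> {2592, 1944} (5MP default)
 *   persist.camera.input.minsize = "1280x720" -> {1280, 720}
 *   persist.camera.input.minsize = "0x0"      -> {0, 0}, so every picture size
 *                                                qualifies as an input size
 */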
9775
Thierry Strudel3d639192016-09-09 11:52:26 -07009776 /* Add input/output stream configurations for each scalar formats*/
9777 for (size_t j = 0; j < scalar_formats_count; j++) {
9778 switch (scalar_formats[j]) {
9779 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9780 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9781 case HAL_PIXEL_FORMAT_RAW10:
9782 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9783 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9784 addStreamConfig(available_stream_configs, scalar_formats[j],
9785 gCamCapability[cameraId]->raw_dim[i],
9786 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9787 }
9788 break;
9789 case HAL_PIXEL_FORMAT_BLOB:
9790 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9791 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9792 addStreamConfig(available_stream_configs, scalar_formats[j],
9793 gCamCapability[cameraId]->picture_sizes_tbl[i],
9794 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9795 }
9796 break;
9797 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9798 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9799 default:
9800 cam_dimension_t largest_picture_size;
9801 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9802 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9803 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9804 addStreamConfig(available_stream_configs, scalar_formats[j],
9805 gCamCapability[cameraId]->picture_sizes_tbl[i],
9806 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009807 /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009808 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9809 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009810 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9811 >= minInputSize.width) || (gCamCapability[cameraId]->
9812 picture_sizes_tbl[i].height >= minInputSize.height)) {
9813 addStreamConfig(available_stream_configs, scalar_formats[j],
9814 gCamCapability[cameraId]->picture_sizes_tbl[i],
9815 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9816 }
9817 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009818 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009819
Thierry Strudel3d639192016-09-09 11:52:26 -07009820 break;
9821 }
9822 }
9823
9824 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9825 available_stream_configs.array(), available_stream_configs.size());
9826 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9827 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9828
9829 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9830 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9831
9832 /* android.scaler.availableMinFrameDurations */
9833 Vector<int64_t> available_min_durations;
9834 for (size_t j = 0; j < scalar_formats_count; j++) {
9835 switch (scalar_formats[j]) {
9836 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9837 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9838 case HAL_PIXEL_FORMAT_RAW10:
9839 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9840 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9841 available_min_durations.add(scalar_formats[j]);
9842 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9843 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9844 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9845 }
9846 break;
9847 default:
9848 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9849 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9850 available_min_durations.add(scalar_formats[j]);
9851 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9852 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9853 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9854 }
9855 break;
9856 }
9857 }
9858 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9859 available_min_durations.array(), available_min_durations.size());
9860
9861 Vector<int32_t> available_hfr_configs;
9862 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9863 int32_t fps = 0;
9864 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9865 case CAM_HFR_MODE_60FPS:
9866 fps = 60;
9867 break;
9868 case CAM_HFR_MODE_90FPS:
9869 fps = 90;
9870 break;
9871 case CAM_HFR_MODE_120FPS:
9872 fps = 120;
9873 break;
9874 case CAM_HFR_MODE_150FPS:
9875 fps = 150;
9876 break;
9877 case CAM_HFR_MODE_180FPS:
9878 fps = 180;
9879 break;
9880 case CAM_HFR_MODE_210FPS:
9881 fps = 210;
9882 break;
9883 case CAM_HFR_MODE_240FPS:
9884 fps = 240;
9885 break;
9886 case CAM_HFR_MODE_480FPS:
9887 fps = 480;
9888 break;
9889 case CAM_HFR_MODE_OFF:
9890 case CAM_HFR_MODE_MAX:
9891 default:
9892 break;
9893 }
9894
9895 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9896 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9897 /* For each HFR frame rate, need to advertise one variable fps range
9898 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9899 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9900 * set by the app. When video recording is started, [120, 120] is
9901 * set. This way sensor configuration does not change when recording
9902 * is started */
9903
9904 /* (width, height, fps_min, fps_max, batch_size_max) */
9905 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9906 j < MAX_SIZES_CNT; j++) {
9907 available_hfr_configs.add(
9908 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9909 available_hfr_configs.add(
9910 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9911 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9912 available_hfr_configs.add(fps);
9913 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9914
9915 /* (width, height, fps_min, fps_max, batch_size_max) */
9916 available_hfr_configs.add(
9917 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9918 available_hfr_configs.add(
9919 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9920 available_hfr_configs.add(fps);
9921 available_hfr_configs.add(fps);
9922 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9923 }
9924 }
9925 }
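/* Illustrative sketch (assuming PREVIEW_FPS_FOR_HFR is 30): for a hypothetical
 * 1920x1080 entry at 120 fps, the loop above appends two
 * (width, height, fps_min, fps_max, batch_size_max) tuples:
 *
 *   (1920, 1080, 30, 120, 4)   // variable range used while previewing
 *   (1920, 1080, 120, 120, 4)  // fixed range used once recording starts
 *
 * so switching from preview to recording does not force a sensor
 * reconfiguration.
 */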
9926 //Advertise HFR capability only if the property is set
9927 memset(prop, 0, sizeof(prop));
9928 property_get("persist.camera.hal3hfr.enable", prop, "1");
9929 uint8_t hfrEnable = (uint8_t)atoi(prop);
9930
9931 if(hfrEnable && available_hfr_configs.array()) {
9932 staticInfo.update(
9933 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9934 available_hfr_configs.array(), available_hfr_configs.size());
9935 }
9936
9937 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9938 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9939 &max_jpeg_size, 1);
9940
9941 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9942 size_t size = 0;
9943 count = CAM_EFFECT_MODE_MAX;
9944 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9945 for (size_t i = 0; i < count; i++) {
9946 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9947 gCamCapability[cameraId]->supported_effects[i]);
9948 if (NAME_NOT_FOUND != val) {
9949 avail_effects[size] = (uint8_t)val;
9950 size++;
9951 }
9952 }
9953 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9954 avail_effects,
9955 size);
9956
9957 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9958 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9959 size_t supported_scene_modes_cnt = 0;
9960 count = CAM_SCENE_MODE_MAX;
9961 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9962 for (size_t i = 0; i < count; i++) {
9963 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9964 CAM_SCENE_MODE_OFF) {
9965 int val = lookupFwkName(SCENE_MODES_MAP,
9966 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9967 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009968
Thierry Strudel3d639192016-09-09 11:52:26 -07009969 if (NAME_NOT_FOUND != val) {
9970 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9971 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9972 supported_scene_modes_cnt++;
9973 }
9974 }
9975 }
9976 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9977 avail_scene_modes,
9978 supported_scene_modes_cnt);
9979
9980 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9981 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9982 supported_scene_modes_cnt,
9983 CAM_SCENE_MODE_MAX,
9984 scene_mode_overrides,
9985 supported_indexes,
9986 cameraId);
9987
9988 if (supported_scene_modes_cnt == 0) {
9989 supported_scene_modes_cnt = 1;
9990 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9991 }
9992
9993 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9994 scene_mode_overrides, supported_scene_modes_cnt * 3);
9995
9996 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9997 ANDROID_CONTROL_MODE_AUTO,
9998 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9999 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
10000 available_control_modes,
10001 3);
10002
10003 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
10004 size = 0;
10005 count = CAM_ANTIBANDING_MODE_MAX;
10006 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
10007 for (size_t i = 0; i < count; i++) {
10008 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
10009 gCamCapability[cameraId]->supported_antibandings[i]);
10010 if (NAME_NOT_FOUND != val) {
10011 avail_antibanding_modes[size] = (uint8_t)val;
10012 size++;
10013 }
10014
10015 }
10016 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
10017 avail_antibanding_modes,
10018 size);
10019
10020 uint8_t avail_abberation_modes[] = {
10021 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
10022 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
10023 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
10024 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
10025 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
10026 if (0 == count) {
10027 // If no aberration correction modes are available for a device, advertise only the OFF mode
10028 size = 1;
10029 } else {
10030 // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
10031 // so advertise all 3 modes if at least one mode is supported, as per the
10032 // new M requirement
10033 size = 3;
10034 }
10035 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10036 avail_abberation_modes,
10037 size);
10038
10039 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
10040 size = 0;
10041 count = CAM_FOCUS_MODE_MAX;
10042 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
10043 for (size_t i = 0; i < count; i++) {
10044 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10045 gCamCapability[cameraId]->supported_focus_modes[i]);
10046 if (NAME_NOT_FOUND != val) {
10047 avail_af_modes[size] = (uint8_t)val;
10048 size++;
10049 }
10050 }
10051 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10052 avail_af_modes,
10053 size);
10054
10055 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10056 size = 0;
10057 count = CAM_WB_MODE_MAX;
10058 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10059 for (size_t i = 0; i < count; i++) {
10060 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10061 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10062 gCamCapability[cameraId]->supported_white_balances[i]);
10063 if (NAME_NOT_FOUND != val) {
10064 avail_awb_modes[size] = (uint8_t)val;
10065 size++;
10066 }
10067 }
10068 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10069 avail_awb_modes,
10070 size);
10071
10072 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10073 count = CAM_FLASH_FIRING_LEVEL_MAX;
10074 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10075 count);
10076 for (size_t i = 0; i < count; i++) {
10077 available_flash_levels[i] =
10078 gCamCapability[cameraId]->supported_firing_levels[i];
10079 }
10080 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10081 available_flash_levels, count);
10082
10083 uint8_t flashAvailable;
10084 if (gCamCapability[cameraId]->flash_available)
10085 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10086 else
10087 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10088 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10089 &flashAvailable, 1);
10090
10091 Vector<uint8_t> avail_ae_modes;
10092 count = CAM_AE_MODE_MAX;
10093 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10094 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080010095 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10096 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10097 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10098 }
10099 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010100 }
10101 if (flashAvailable) {
10102 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10103 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10104 }
10105 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10106 avail_ae_modes.array(),
10107 avail_ae_modes.size());
10108
10109 int32_t sensitivity_range[2];
10110 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10111 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10112 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10113 sensitivity_range,
10114 sizeof(sensitivity_range) / sizeof(int32_t));
10115
10116 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10117 &gCamCapability[cameraId]->max_analog_sensitivity,
10118 1);
10119
10120 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10121 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10122 &sensor_orientation,
10123 1);
10124
10125 int32_t max_output_streams[] = {
10126 MAX_STALLING_STREAMS,
10127 MAX_PROCESSED_STREAMS,
10128 MAX_RAW_STREAMS};
10129 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10130 max_output_streams,
10131 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10132
10133 uint8_t avail_leds = 0;
10134 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10135 &avail_leds, 0);
10136
10137 uint8_t focus_dist_calibrated;
10138 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10139 gCamCapability[cameraId]->focus_dist_calibrated);
10140 if (NAME_NOT_FOUND != val) {
10141 focus_dist_calibrated = (uint8_t)val;
10142 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10143 &focus_dist_calibrated, 1);
10144 }
10145
10146 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10147 size = 0;
10148 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10149 MAX_TEST_PATTERN_CNT);
10150 for (size_t i = 0; i < count; i++) {
10151 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10152 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10153 if (NAME_NOT_FOUND != testpatternMode) {
10154 avail_testpattern_modes[size] = testpatternMode;
10155 size++;
10156 }
10157 }
10158 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10159 avail_testpattern_modes,
10160 size);
10161
10162 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10163 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10164 &max_pipeline_depth,
10165 1);
10166
10167 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10168 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10169 &partial_result_count,
10170 1);
10171
10172 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10173 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10174
10175 Vector<uint8_t> available_capabilities;
10176 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10177 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10178 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10179 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10180 if (supportBurst) {
10181 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10182 }
10183 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10184 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10185 if (hfrEnable && available_hfr_configs.array()) {
10186 available_capabilities.add(
10187 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10188 }
10189
10190 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10191 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10192 }
10193 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10194 available_capabilities.array(),
10195 available_capabilities.size());
10196
10197 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10198 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10199 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10200 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10201
10202 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10203 &aeLockAvailable, 1);
10204
10205 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10206 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10207 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10208 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10209
10210 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10211 &awbLockAvailable, 1);
10212
10213 int32_t max_input_streams = 1;
10214 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10215 &max_input_streams,
10216 1);
10217
10218 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10219 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10220 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10221 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10222 HAL_PIXEL_FORMAT_YCbCr_420_888};
10223 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10224 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10225
10226 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10227 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10228 &max_latency,
10229 1);
10230
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010231#ifndef USE_HAL_3_3
10232 int32_t isp_sensitivity_range[2];
10233 isp_sensitivity_range[0] =
10234 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10235 isp_sensitivity_range[1] =
10236 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10237 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10238 isp_sensitivity_range,
10239 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10240#endif
10241
Thierry Strudel3d639192016-09-09 11:52:26 -070010242 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10243 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10244 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10245 available_hot_pixel_modes,
10246 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10247
10248 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10249 ANDROID_SHADING_MODE_FAST,
10250 ANDROID_SHADING_MODE_HIGH_QUALITY};
10251 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10252 available_shading_modes,
10253 3);
10254
10255 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10256 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10257 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10258 available_lens_shading_map_modes,
10259 2);
10260
10261 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10262 ANDROID_EDGE_MODE_FAST,
10263 ANDROID_EDGE_MODE_HIGH_QUALITY,
10264 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10265 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10266 available_edge_modes,
10267 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10268
10269 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10270 ANDROID_NOISE_REDUCTION_MODE_FAST,
10271 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10272 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10273 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10274 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10275 available_noise_red_modes,
10276 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10277
10278 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10279 ANDROID_TONEMAP_MODE_FAST,
10280 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10281 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10282 available_tonemap_modes,
10283 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10284
10285 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10286 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10287 available_hot_pixel_map_modes,
10288 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10289
10290 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10291 gCamCapability[cameraId]->reference_illuminant1);
10292 if (NAME_NOT_FOUND != val) {
10293 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10294 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10295 }
10296
10297 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10298 gCamCapability[cameraId]->reference_illuminant2);
10299 if (NAME_NOT_FOUND != val) {
10300 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10301 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10302 }
10303
10304 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10305 (void *)gCamCapability[cameraId]->forward_matrix1,
10306 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10307
10308 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10309 (void *)gCamCapability[cameraId]->forward_matrix2,
10310 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10311
10312 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10313 (void *)gCamCapability[cameraId]->color_transform1,
10314 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10315
10316 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10317 (void *)gCamCapability[cameraId]->color_transform2,
10318 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10319
10320 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10321 (void *)gCamCapability[cameraId]->calibration_transform1,
10322 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10323
10324 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10325 (void *)gCamCapability[cameraId]->calibration_transform2,
10326 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10327
10328 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10329 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10330 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10331 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10332 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10333 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10334 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10335 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10336 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10337 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10338 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10339 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10340 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10341 ANDROID_JPEG_GPS_COORDINATES,
10342 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10343 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10344 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10345 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10346 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10347 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10348 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10349 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10350 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10351 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010352#ifndef USE_HAL_3_3
10353 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10354#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010355 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010356 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010357 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10358 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010359 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010360 /* DevCamDebug metadata request_keys_basic */
10361 DEVCAMDEBUG_META_ENABLE,
10362 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010363 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010364 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010365 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010366 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010367 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010368 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010369
10370 size_t request_keys_cnt =
10371 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10372 Vector<int32_t> available_request_keys;
10373 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10374 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10375 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10376 }
10377
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010378 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010379 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010380 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010381 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010382 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010383 }
10384
Thierry Strudel3d639192016-09-09 11:52:26 -070010385 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10386 available_request_keys.array(), available_request_keys.size());
10387
10388 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10389 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10390 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10391 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10392 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10393 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10394 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10395 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10396 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10397 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10398 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10399 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10400 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10401 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10402 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10403 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10404 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010405 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010406 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10407 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10408 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010409 ANDROID_STATISTICS_FACE_SCORES,
10410#ifndef USE_HAL_3_3
10411 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10412#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010413 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010414 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010415 // DevCamDebug metadata result_keys_basic
10416 DEVCAMDEBUG_META_ENABLE,
10417 // DevCamDebug metadata result_keys AF
10418 DEVCAMDEBUG_AF_LENS_POSITION,
10419 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10420 DEVCAMDEBUG_AF_TOF_DISTANCE,
10421 DEVCAMDEBUG_AF_LUMA,
10422 DEVCAMDEBUG_AF_HAF_STATE,
10423 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10424 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10425 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10426 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10427 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10428 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10429 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10430 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10431 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10432 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10433 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10434 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10435 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10436 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10437 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10438 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10439 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10440 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10441 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10442 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10443 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10444 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10445 // DevCamDebug metadata result_keys AEC
10446 DEVCAMDEBUG_AEC_TARGET_LUMA,
10447 DEVCAMDEBUG_AEC_COMP_LUMA,
10448 DEVCAMDEBUG_AEC_AVG_LUMA,
10449 DEVCAMDEBUG_AEC_CUR_LUMA,
10450 DEVCAMDEBUG_AEC_LINECOUNT,
10451 DEVCAMDEBUG_AEC_REAL_GAIN,
10452 DEVCAMDEBUG_AEC_EXP_INDEX,
10453 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010454 // DevCamDebug metadata result_keys zzHDR
10455 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10456 DEVCAMDEBUG_AEC_L_LINECOUNT,
10457 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10458 DEVCAMDEBUG_AEC_S_LINECOUNT,
10459 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10460 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10461 // DevCamDebug metadata result_keys ADRC
10462 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10463 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10464 DEVCAMDEBUG_AEC_GTM_RATIO,
10465 DEVCAMDEBUG_AEC_LTM_RATIO,
10466 DEVCAMDEBUG_AEC_LA_RATIO,
10467 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010468 // DevCamDebug metadata result_keys AEC MOTION
10469 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10470 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10471 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010472 // DevCamDebug metadata result_keys AWB
10473 DEVCAMDEBUG_AWB_R_GAIN,
10474 DEVCAMDEBUG_AWB_G_GAIN,
10475 DEVCAMDEBUG_AWB_B_GAIN,
10476 DEVCAMDEBUG_AWB_CCT,
10477 DEVCAMDEBUG_AWB_DECISION,
10478 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010479 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10480 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10481 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010482 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010483 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -070010484 NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010485 };
10486
Thierry Strudel3d639192016-09-09 11:52:26 -070010487 size_t result_keys_cnt =
10488 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10489
10490 Vector<int32_t> available_result_keys;
10491 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10492 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10493 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10494 }
10495 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10496 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10497 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10498 }
10499 if (supportedFaceDetectMode == 1) {
10500 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10501 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10502 } else if ((supportedFaceDetectMode == 2) ||
10503 (supportedFaceDetectMode == 3)) {
10504 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10505 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10506 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010507#ifndef USE_HAL_3_3
Shuzhen Wanga1bc9de2017-09-14 16:54:02 -070010508 {
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010509 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10510 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10511 }
10512#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010513
10514 if (gExposeEnableZslKey) {
10515 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010516 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010517 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10518 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010519 }
10520
Thierry Strudel3d639192016-09-09 11:52:26 -070010521 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10522 available_result_keys.array(), available_result_keys.size());
10523
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010524 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010525 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10526 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10527 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10528 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10529 ANDROID_SCALER_CROPPING_TYPE,
10530 ANDROID_SYNC_MAX_LATENCY,
10531 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10532 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10533 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10534 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10535 ANDROID_CONTROL_SCENE_MODE_OVERRIDES, ANDROID_FLASH_INFO_AVAILABLE,
10536 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10537 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10538 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10539 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10540 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10541 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10542 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10543 ANDROID_LENS_FACING,
10544 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10545 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10546 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10547 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10548 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10549 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10550 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10551 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10552 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10553 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10554 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10555 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10556 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10557 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10558 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10559 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10560 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10561 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10562 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10563 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010564 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010565 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10566 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10567 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10568 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10569 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10570 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10571 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10572 ANDROID_CONTROL_AVAILABLE_MODES,
10573 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10574 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10575 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10576 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010577 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10578#ifndef USE_HAL_3_3
10579 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10580 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10581#endif
10582 };
10583
10584 Vector<int32_t> available_characteristics_keys;
10585 available_characteristics_keys.appendArray(characteristics_keys_basic,
10586 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10587#ifndef USE_HAL_3_3
10588 if (hasBlackRegions) {
10589 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10590 }
10591#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010592
10593 if (0 <= indexPD) {
10594 int32_t depthKeys[] = {
10595 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10596 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10597 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10598 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10599 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10600 };
10601 available_characteristics_keys.appendArray(depthKeys,
10602 sizeof(depthKeys) / sizeof(depthKeys[0]));
10603 }
10604
Thierry Strudel3d639192016-09-09 11:52:26 -070010605 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010606 available_characteristics_keys.array(),
10607 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010608
10609 /* Available stall durations depend on the HW + SW and will differ across devices. */
10610 /* RAW stall durations still have to be added after implementation. */
10611 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10612 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10613
10614 Vector<int64_t> available_stall_durations;
10615 for (uint32_t j = 0; j < stall_formats_count; j++) {
10616 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10617 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10618 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10619 available_stall_durations.add(stall_formats[j]);
10620 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10621 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10622 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10623 }
10624 } else {
10625 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10626 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10627 available_stall_durations.add(stall_formats[j]);
10628 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10629 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10630 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10631 }
10632 }
10633 }
10634 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10635 available_stall_durations.array(),
10636 available_stall_durations.size());
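/*
 * Illustrative note (hypothetical numbers): every stall-duration entry appended
 * above is a quadruple of (format, width, height, stall duration in ns), so a
 * 4000x3000 JPEG size with a 300 ms stall would show up as
 *
 *   { HAL_PIXEL_FORMAT_BLOB, 4000, 3000, 300000000 }
 */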
10637
10638 //QCAMERA3_OPAQUE_RAW
10639 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10640 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10641 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10642 case LEGACY_RAW:
10643 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10644 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10645 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10646 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10647 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10648 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10649 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10650 break;
10651 case MIPI_RAW:
10652 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10653 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10654 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10655 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10656 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10657 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10658 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10659 break;
10660 default:
10661 LOGE("unknown opaque_raw_format %d",
10662 gCamCapability[cameraId]->opaque_raw_fmt);
10663 break;
10664 }
10665 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10666
10667 Vector<int32_t> strides;
10668 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10669 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10670 cam_stream_buf_plane_info_t buf_planes;
10671 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10672 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10673 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10674 &gCamCapability[cameraId]->padding_info, &buf_planes);
10675 strides.add(buf_planes.plane_info.mp[0].stride);
10676 }
10677 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10678 strides.size());
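/*
 * Illustrative note (hypothetical stride value): each QCAMERA3_OPAQUE_RAW_STRIDES
 * entry added above is a (width, height, stride) triplet for one supported raw
 * dimension, e.g. {4032, 3024, 5040} if the packed 10-bit line stride works out
 * to 5040 bytes for a 4032-pixel-wide frame.
 */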
10679
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010680 //TBD: remove the following line once backend advertises zzHDR in feature mask
10681 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010682 //Video HDR default
10683 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10684 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010685 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010686 int32_t vhdr_mode[] = {
10687 QCAMERA3_VIDEO_HDR_MODE_OFF,
10688 QCAMERA3_VIDEO_HDR_MODE_ON};
10689
10690 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10691 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10692 vhdr_mode, vhdr_mode_count);
10693 }
10694
Thierry Strudel3d639192016-09-09 11:52:26 -070010695 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10696 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10697 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10698
10699 uint8_t isMonoOnly =
10700 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10701 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10702 &isMonoOnly, 1);
10703
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010704#ifndef USE_HAL_3_3
10705 Vector<int32_t> opaque_size;
10706 for (size_t j = 0; j < scalar_formats_count; j++) {
10707 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10708 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10709 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10710 cam_stream_buf_plane_info_t buf_planes;
10711
10712 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10713 &gCamCapability[cameraId]->padding_info, &buf_planes);
10714
10715 if (rc == 0) {
10716 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10717 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10718 opaque_size.add(buf_planes.plane_info.frame_len);
10719 } else {
10720 LOGE("raw frame calculation failed!");
10721 }
10722 }
10723 }
10724 }
10725
10726 if ((opaque_size.size() > 0) &&
10727 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10728 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10729 else
10730 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using a rough estimation (2 bytes/pixel)");
10731#endif
10732
Thierry Strudel04e026f2016-10-10 11:27:36 -070010733 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10734 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10735 size = 0;
10736 count = CAM_IR_MODE_MAX;
10737 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10738 for (size_t i = 0; i < count; i++) {
10739 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10740 gCamCapability[cameraId]->supported_ir_modes[i]);
10741 if (NAME_NOT_FOUND != val) {
10742 avail_ir_modes[size] = (int32_t)val;
10743 size++;
10744 }
10745 }
10746 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10747 avail_ir_modes, size);
10748 }
10749
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010750 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10751 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10752 size = 0;
10753 count = CAM_AEC_CONVERGENCE_MAX;
10754 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10755 for (size_t i = 0; i < count; i++) {
10756 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10757 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10758 if (NAME_NOT_FOUND != val) {
10759 available_instant_aec_modes[size] = (int32_t)val;
10760 size++;
10761 }
10762 }
10763 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10764 available_instant_aec_modes, size);
10765 }
10766
Thierry Strudel54dc9782017-02-15 12:12:10 -080010767 int32_t sharpness_range[] = {
10768 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10769 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10770 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10771
10772 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10773 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10774 size = 0;
10775 count = CAM_BINNING_CORRECTION_MODE_MAX;
10776 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10777 for (size_t i = 0; i < count; i++) {
10778 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10779 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10780 gCamCapability[cameraId]->supported_binning_modes[i]);
10781 if (NAME_NOT_FOUND != val) {
10782 avail_binning_modes[size] = (int32_t)val;
10783 size++;
10784 }
10785 }
10786 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10787 avail_binning_modes, size);
10788 }
10789
10790 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10791 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10792 size = 0;
10793 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10794 for (size_t i = 0; i < count; i++) {
10795 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10796 gCamCapability[cameraId]->supported_aec_modes[i]);
10797 if (NAME_NOT_FOUND != val)
10798 available_aec_modes[size++] = val;
10799 }
10800 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10801 available_aec_modes, size);
10802 }
10803
10804 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10805 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10806 size = 0;
10807 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10808 for (size_t i = 0; i < count; i++) {
10809 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10810 gCamCapability[cameraId]->supported_iso_modes[i]);
10811 if (NAME_NOT_FOUND != val)
10812 available_iso_modes[size++] = val;
10813 }
10814 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10815 available_iso_modes, size);
10816 }
10817
10818 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010819 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010820 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10821 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10822 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10823
10824 int32_t available_saturation_range[4];
10825 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10826 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10827 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10828 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10829 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10830 available_saturation_range, 4);
10831
10832 uint8_t is_hdr_values[2];
10833 is_hdr_values[0] = 0;
10834 is_hdr_values[1] = 1;
10835 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10836 is_hdr_values, 2);
10837
10838 float is_hdr_confidence_range[2];
10839 is_hdr_confidence_range[0] = 0.0;
10840 is_hdr_confidence_range[1] = 1.0;
10841 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10842 is_hdr_confidence_range, 2);
10843
Emilian Peev0a972ef2017-03-16 10:25:53 +000010844 size_t eepromLength = strnlen(
10845 reinterpret_cast<const char *>(
10846 gCamCapability[cameraId]->eeprom_version_info),
10847 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10848 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010849 char easelInfo[] = ",E:N";
10850 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10851 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10852 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010853 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010854 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010855 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010856 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010857 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10858 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10859 }
10860
Thierry Strudel3d639192016-09-09 11:52:26 -070010861 gStaticMetadata[cameraId] = staticInfo.release();
10862 return rc;
10863}
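/*
 * Usage sketch (not part of the HAL): once gStaticMetadata[cameraId] has been
 * populated above, a static key can be read back through the camera_metadata C
 * API. The tag chosen below is just an example and error handling is minimal.
 *
 *   camera_metadata_ro_entry_t entry;
 *   int ret = find_camera_metadata_ro_entry(gStaticMetadata[cameraId],
 *           ANDROID_REQUEST_PARTIAL_RESULT_COUNT, &entry);
 *   if ((ret == 0) && (entry.count == 1)) {
 *       int32_t partialCount = entry.data.i32[0];  // PARTIAL_RESULT_COUNT set earlier
 *   }
 */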
10864
10865/*===========================================================================
10866 * FUNCTION : makeTable
10867 *
10868 * DESCRIPTION: make a table of sizes
10869 *
10870 * PARAMETERS :
10871 *
10872 *
10873 *==========================================================================*/
10874void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10875 size_t max_size, int32_t *sizeTable)
10876{
10877 size_t j = 0;
10878 if (size > max_size) {
10879 size = max_size;
10880 }
10881 for (size_t i = 0; i < size; i++) {
10882 sizeTable[j] = dimTable[i].width;
10883 sizeTable[j+1] = dimTable[i].height;
10884 j+=2;
10885 }
10886}
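/*
 * Worked example (hypothetical dimensions, assuming cam_dimension_t lists width
 * before height): makeTable() flattens dimension structs into the interleaved
 * width/height array the framework expects.
 *
 *   cam_dimension_t dims[2] = {{4032, 3024}, {1920, 1080}};
 *   int32_t sizes[4];
 *   makeTable(dims, 2, 2, sizes);   // sizes == {4032, 3024, 1920, 1080}
 */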
10887
10888/*===========================================================================
10889 * FUNCTION : makeFPSTable
10890 *
10891 * DESCRIPTION: make a table of fps ranges
10892 *
10893 * PARAMETERS :
10894 *
10895 *==========================================================================*/
10896void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10897 size_t max_size, int32_t *fpsRangesTable)
10898{
10899 size_t j = 0;
10900 if (size > max_size) {
10901 size = max_size;
10902 }
10903 for (size_t i = 0; i < size; i++) {
10904 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10905 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10906 j+=2;
10907 }
10908}
10909
10910/*===========================================================================
10911 * FUNCTION : makeOverridesList
10912 *
10913 * DESCRIPTION: make a list of scene mode overrides
10914 *
10915 * PARAMETERS :
10916 *
10917 *
10918 *==========================================================================*/
10919void QCamera3HardwareInterface::makeOverridesList(
10920 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10921 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10922{
10923 /* The daemon gives a list of overrides for all scene modes.
10924 However, we should send the framework only the overrides for the scene
10925 modes it supports. */
10926 size_t j = 0;
10927 if (size > max_size) {
10928 size = max_size;
10929 }
10930 size_t focus_count = CAM_FOCUS_MODE_MAX;
10931 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10932 focus_count);
10933 for (size_t i = 0; i < size; i++) {
10934 bool supt = false;
10935 size_t index = supported_indexes[i];
10936 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10937 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10938 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10939 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10940 overridesTable[index].awb_mode);
10941 if (NAME_NOT_FOUND != val) {
10942 overridesList[j+1] = (uint8_t)val;
10943 }
10944 uint8_t focus_override = overridesTable[index].af_mode;
10945 for (size_t k = 0; k < focus_count; k++) {
10946 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10947 supt = true;
10948 break;
10949 }
10950 }
10951 if (supt) {
10952 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10953 focus_override);
10954 if (NAME_NOT_FOUND != val) {
10955 overridesList[j+2] = (uint8_t)val;
10956 }
10957 } else {
10958 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10959 }
10960 j+=3;
10961 }
10962}
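/*
 * Illustrative note (hypothetical override values): for every supported scene
 * mode, makeOverridesList() emits a triplet of (AE, AWB, AF) overrides, e.g.
 *
 *   { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,
 *     ANDROID_CONTROL_AWB_MODE_INCANDESCENT,
 *     ANDROID_CONTROL_AF_MODE_MACRO }
 *
 * and the AF slot falls back to ANDROID_CONTROL_AF_MODE_OFF whenever the
 * daemon's focus override is not in the camera's supported focus mode list.
 */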
10963
10964/*===========================================================================
10965 * FUNCTION : filterJpegSizes
10966 *
10967 * DESCRIPTION: Returns the supported JPEG sizes, i.e. the processed sizes that
10968 * are no smaller than the active array size divided by the maximum downscale factor
10969 *
10970 * PARAMETERS :
10971 *
10972 * RETURN : length of jpegSizes array
10973 *==========================================================================*/
10974
10975size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10976 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10977 uint8_t downscale_factor)
10978{
10979 if (0 == downscale_factor) {
10980 downscale_factor = 1;
10981 }
10982
10983 int32_t min_width = active_array_size.width / downscale_factor;
10984 int32_t min_height = active_array_size.height / downscale_factor;
10985 size_t jpegSizesCnt = 0;
10986 if (processedSizesCnt > maxCount) {
10987 processedSizesCnt = maxCount;
10988 }
10989 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10990 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10991 jpegSizes[jpegSizesCnt] = processedSizes[i];
10992 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10993 jpegSizesCnt += 2;
10994 }
10995 }
10996 return jpegSizesCnt;
10997}
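/*
 * Worked example (hypothetical numbers): with a 4000x3000 active array and
 * downscale_factor = 4, min_width/min_height become 1000/750. A processed size
 * list of {4000,3000, 1920,1080, 640,480} is therefore filtered down to
 * {4000,3000, 1920,1080} and filterJpegSizes() returns 4 (entries kept).
 */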
10998
10999/*===========================================================================
11000 * FUNCTION : computeNoiseModelEntryS
11001 *
11002 * DESCRIPTION: function to map a given sensitivity to the S noise
11003 * model parameters in the DNG noise model.
11004 *
11005 * PARAMETERS : sens : the sensor sensitivity
11006 *
11007 * RETURN : S (sensor amplification) noise
11008 *
11009 *==========================================================================*/
11010double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
11011 double s = gCamCapability[mCameraId]->gradient_S * sens +
11012 gCamCapability[mCameraId]->offset_S;
11013 return ((s < 0.0) ? 0.0 : s);
11014}
11015
11016/*===========================================================================
11017 * FUNCTION : computeNoiseModelEntryO
11018 *
11019 * DESCRIPTION: function to map a given sensitivity to the O noise
11020 * model parameters in the DNG noise model.
11021 *
11022 * PARAMETERS : sens : the sensor sensitivity
11023 *
11024 * RETURN : O (sensor readout) noise
11025 *
11026 *==========================================================================*/
11027double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
11028 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
11029 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
11030 1.0 : (1.0 * sens / max_analog_sens);
11031 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
11032 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
11033 return ((o < 0.0) ? 0.0 : o);
11034}
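/*
 * Illustrative note: computeNoiseModelEntryS() and computeNoiseModelEntryO()
 * together parameterize the DNG-style noise profile reported via
 * ANDROID_SENSOR_NOISE_PROFILE, where the noise variance of a normalized pixel
 * value x at sensitivity sens is modeled approximately as
 *
 *   variance(x) = S * x + O
 *   S = gradient_S * sens + offset_S
 *   O = gradient_O * sens^2 + offset_O * digital_gain^2
 *
 * with digital_gain = max(1.0, sens / max_analog_sensitivity), matching the
 * code above.
 */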
11035
11036/*===========================================================================
11037 * FUNCTION : getSensorSensitivity
11038 *
11039 * DESCRIPTION: convert iso_mode to an integer value
11040 *
11041 * PARAMETERS : iso_mode : the iso_mode supported by sensor
11042 *
11043 * RETURN : sensitivity supported by sensor
11044 *
11045 *==========================================================================*/
11046int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11047{
11048 int32_t sensitivity;
11049
11050 switch (iso_mode) {
11051 case CAM_ISO_MODE_100:
11052 sensitivity = 100;
11053 break;
11054 case CAM_ISO_MODE_200:
11055 sensitivity = 200;
11056 break;
11057 case CAM_ISO_MODE_400:
11058 sensitivity = 400;
11059 break;
11060 case CAM_ISO_MODE_800:
11061 sensitivity = 800;
11062 break;
11063 case CAM_ISO_MODE_1600:
11064 sensitivity = 1600;
11065 break;
11066 default:
11067 sensitivity = -1;
11068 break;
11069 }
11070 return sensitivity;
11071}
11072
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011073int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011074 if (gEaselManagerClient == nullptr) {
11075 gEaselManagerClient = EaselManagerClient::create();
11076 if (gEaselManagerClient == nullptr) {
11077 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11078 return -ENODEV;
11079 }
11080 }
11081
11082 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011083 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11084 // to connect to Easel.
11085 bool doNotpowerOnEasel =
11086 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11087
11088 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011089 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11090 return OK;
11091 }
11092
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011093 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011094 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011095 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011096 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11097 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011098 return res;
11099 }
11100
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011101 EaselManagerClientOpened = true;
11102
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011103 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011104 if (res != OK) {
11105 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11106 }
11107
Zhijun Hedaacd8a2017-09-14 12:07:42 -070011108 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011109 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011110
11111 // Expose enableZsl key only when HDR+ mode is enabled.
11112 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011113 }
11114
11115 return OK;
11116}
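/*
 * Usage sketch (assumption: property changes take effect the next time the
 * camera provider process starts): the Easel/HDR+ behavior decided above is
 * driven by system properties, so on a debuggable build it can be toggled from
 * a shell, e.g.:
 *
 *   adb shell setprop persist.camera.hdrplus.enable 1     # full HDR+, clears gEaselBypassOnly
 *   adb shell setprop persist.camera.hdrplus.profiling 1  # HDR+ profiling
 *   adb shell setprop camera.hdrplus.donotpoweroneasel 1  # leave Easel off for HDR+ tests
 */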
11117
Thierry Strudel3d639192016-09-09 11:52:26 -070011118/*===========================================================================
11119 * FUNCTION : getCamInfo
11120 *
11121 * DESCRIPTION: query camera capabilities
11122 *
11123 * PARAMETERS :
11124 * @cameraId : camera Id
11125 * @info : camera info struct to be filled in with camera capabilities
11126 *
11127 * RETURN : int type of status
11128 * NO_ERROR -- success
11129 * non-zero failure code
11130 *==========================================================================*/
11131int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11132 struct camera_info *info)
11133{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011134 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011135 int rc = 0;
11136
11137 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011138
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011139 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011140 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011141 rc = initHdrPlusClientLocked();
11142 if (rc != OK) {
11143 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11144 pthread_mutex_unlock(&gCamLock);
11145 return rc;
11146 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011147 }
11148
Thierry Strudel3d639192016-09-09 11:52:26 -070011149 if (NULL == gCamCapability[cameraId]) {
11150 rc = initCapabilities(cameraId);
11151 if (rc < 0) {
11152 pthread_mutex_unlock(&gCamLock);
11153 return rc;
11154 }
11155 }
11156
11157 if (NULL == gStaticMetadata[cameraId]) {
11158 rc = initStaticMetadata(cameraId);
11159 if (rc < 0) {
11160 pthread_mutex_unlock(&gCamLock);
11161 return rc;
11162 }
11163 }
11164
11165 switch(gCamCapability[cameraId]->position) {
11166 case CAM_POSITION_BACK:
11167 case CAM_POSITION_BACK_AUX:
11168 info->facing = CAMERA_FACING_BACK;
11169 break;
11170
11171 case CAM_POSITION_FRONT:
11172 case CAM_POSITION_FRONT_AUX:
11173 info->facing = CAMERA_FACING_FRONT;
11174 break;
11175
11176 default:
11177 LOGE("Unknown position type %d for camera id:%d",
11178 gCamCapability[cameraId]->position, cameraId);
11179 rc = -1;
11180 break;
11181 }
11182
11183
11184 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011185#ifndef USE_HAL_3_3
11186 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11187#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011188 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011189#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011190 info->static_camera_characteristics = gStaticMetadata[cameraId];
11191
11192 //For now assume both cameras can operate independently.
11193 info->conflicting_devices = NULL;
11194 info->conflicting_devices_length = 0;
11195
11196 //Resource cost is 100 * MIN(1.0, m/M),
11197 //where m is the throughput requirement with the maximum stream configuration
11198 //and M is the maximum CPP throughput.
11199 float max_fps = 0.0;
11200 for (uint32_t i = 0;
11201 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11202 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11203 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11204 }
11205 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11206 gCamCapability[cameraId]->active_array_size.width *
11207 gCamCapability[cameraId]->active_array_size.height * max_fps /
11208 gCamCapability[cameraId]->max_pixel_bandwidth;
11209 info->resource_cost = 100 * MIN(1.0, ratio);
11210 LOGI("camera %d resource cost is %d", cameraId,
11211 info->resource_cost);
11212
11213 pthread_mutex_unlock(&gCamLock);
11214 return rc;
11215}
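/*
 * Worked example (hypothetical numbers, MAX_PROCESSED_STREAMS assumed to be 3):
 * with a 4032x3024 active array, max_fps = 30 and max_pixel_bandwidth =
 * 1.2e9 pixels/s, the ratio is 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91, so
 * info->resource_cost is reported as about 91; ratios above 1.0 are clamped,
 * capping the cost at 100.
 */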
11216
11217/*===========================================================================
11218 * FUNCTION : translateCapabilityToMetadata
11219 *
11220 * DESCRIPTION: translate the capability into camera_metadata_t
11221 *
11222 * PARAMETERS : type of the request
11223 *
11224 *
11225 * RETURN : success: camera_metadata_t*
11226 * failure: NULL
11227 *
11228 *==========================================================================*/
11229camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11230{
11231 if (mDefaultMetadata[type] != NULL) {
11232 return mDefaultMetadata[type];
11233 }
11234 //first time we are handling this request
11235 //fill up the metadata structure using the wrapper class
11236 CameraMetadata settings;
11237 //translate from cam_capability_t to camera_metadata_tag_t
11238 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11239 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11240 int32_t defaultRequestID = 0;
11241 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11242
11243 /* OIS disable */
11244 char ois_prop[PROPERTY_VALUE_MAX];
11245 memset(ois_prop, 0, sizeof(ois_prop));
11246 property_get("persist.camera.ois.disable", ois_prop, "0");
11247 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11248
11249 /* Force video to use OIS */
11250 char videoOisProp[PROPERTY_VALUE_MAX];
11251 memset(videoOisProp, 0, sizeof(videoOisProp));
11252 property_get("persist.camera.ois.video", videoOisProp, "1");
11253 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011254
11255 // Hybrid AE enable/disable
11256 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11257 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11258 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011259 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011260
Thierry Strudel3d639192016-09-09 11:52:26 -070011261 uint8_t controlIntent = 0;
11262 uint8_t focusMode;
11263 uint8_t vsMode;
11264 uint8_t optStabMode;
11265 uint8_t cacMode;
11266 uint8_t edge_mode;
11267 uint8_t noise_red_mode;
11268 uint8_t tonemap_mode;
11269 bool highQualityModeEntryAvailable = FALSE;
11270 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011271 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011272 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11273 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011274 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011275 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011276 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011277
Thierry Strudel3d639192016-09-09 11:52:26 -070011278 switch (type) {
11279 case CAMERA3_TEMPLATE_PREVIEW:
11280 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11281 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11282 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11283 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11284 edge_mode = ANDROID_EDGE_MODE_FAST;
11285 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11286 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11287 break;
11288 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11289 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11290 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11291 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11292 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11293 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11294 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11295 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11296 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11297 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11298 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11299 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11300 highQualityModeEntryAvailable = TRUE;
11301 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11302 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11303 fastModeEntryAvailable = TRUE;
11304 }
11305 }
11306 if (highQualityModeEntryAvailable) {
11307 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11308 } else if (fastModeEntryAvailable) {
11309 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11310 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011311 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11312 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11313 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011314 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011315 break;
11316 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11317 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11318 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11319 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011320 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11321 edge_mode = ANDROID_EDGE_MODE_FAST;
11322 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11323 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11324 if (forceVideoOis)
11325 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11326 break;
11327 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11328 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11329 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11330 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011331 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11332 edge_mode = ANDROID_EDGE_MODE_FAST;
11333 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11334 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11335 if (forceVideoOis)
11336 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11337 break;
11338 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11339 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11340 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11341 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11342 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11343 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11344 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11345 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11346 break;
11347 case CAMERA3_TEMPLATE_MANUAL:
11348 edge_mode = ANDROID_EDGE_MODE_FAST;
11349 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11350 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11351 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11352 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11353 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11354 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11355 break;
11356 default:
11357 edge_mode = ANDROID_EDGE_MODE_FAST;
11358 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11359 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11360 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11361 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11362 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11363 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11364 break;
11365 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011366 // Set CAC to OFF if the underlying device doesn't support it
11367 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11368 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11369 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011370 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11371 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11372 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11373 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11374 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11375 }
11376 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011377 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011378 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011379
11380 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11381 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11382 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11383 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11384 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11385 || ois_disable)
11386 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11387 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011388 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011389
11390 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11391 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11392
11393 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11394 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11395
11396 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11397 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11398
11399 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11400 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11401
11402 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11403 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11404
11405 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11406 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11407
11408 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11409 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11410
11411 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11412 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11413
11414 /*flash*/
11415 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11416 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11417
11418 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11419 settings.update(ANDROID_FLASH_FIRING_POWER,
11420 &flashFiringLevel, 1);
11421
11422 /* lens */
11423 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11424 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11425
11426 if (gCamCapability[mCameraId]->filter_densities_count) {
11427 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11428 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density, 1);
11430 }
11431
11432 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11433 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11434
Thierry Strudel3d639192016-09-09 11:52:26 -070011435 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11436 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11437
11438 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11439 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11440
11441 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11442 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11443
11444 /* face detection (default to OFF) */
11445 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11446 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11447
Thierry Strudel54dc9782017-02-15 12:12:10 -080011448 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11449 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011450
11451 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11452 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11453
11454 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11455 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11456
Thierry Strudel3d639192016-09-09 11:52:26 -070011457
11458 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11459 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11460
11461 /* Exposure time (default to the minimum supported exposure time) */
11462 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11463 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11464
11465 /* frame duration */
11466 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11467 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11468
11469 /* sensitivity */
11470 static const int32_t default_sensitivity = 100;
11471 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011472#ifndef USE_HAL_3_3
11473 static const int32_t default_isp_sensitivity =
11474 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11475 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11476#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011477
11478 /*edge mode*/
11479 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11480
11481 /*noise reduction mode*/
11482 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11483
11484 /*color correction mode*/
11485 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11486 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11487
11488 /*tonemap mode*/
11489 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11490
11491 int32_t scaler_crop_region[4];
11492 scaler_crop_region[0] = 0;
11493 scaler_crop_region[1] = 0;
11494 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11495 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11496 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11497
11498 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11499 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11500
11501 /*focus distance*/
11502 float focus_distance = 0.0;
11503 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11504
11505 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011506 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011507 float max_range = 0.0;
11508 float max_fixed_fps = 0.0;
11509 int32_t fps_range[2] = {0, 0};
11510 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11511 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011512 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11513 TEMPLATE_MAX_PREVIEW_FPS) {
11514 continue;
11515 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011516 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11517 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11518 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11519 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11520 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11521 if (range > max_range) {
11522 fps_range[0] =
11523 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11524 fps_range[1] =
11525 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11526 max_range = range;
11527 }
11528 } else {
11529 if (range < 0.01 && max_fixed_fps <
11530 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11531 fps_range[0] =
11532 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11533 fps_range[1] =
11534 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11535 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11536 }
11537 }
11538 }
11539 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
11540
11541 /*precapture trigger*/
11542 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11543 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11544
11545 /*af trigger*/
11546 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11547 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11548
11549 /* ae & af regions */
11550 int32_t active_region[] = {
11551 gCamCapability[mCameraId]->active_array_size.left,
11552 gCamCapability[mCameraId]->active_array_size.top,
11553 gCamCapability[mCameraId]->active_array_size.left +
11554 gCamCapability[mCameraId]->active_array_size.width,
11555 gCamCapability[mCameraId]->active_array_size.top +
11556 gCamCapability[mCameraId]->active_array_size.height,
11557 0};
11558 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11559 sizeof(active_region) / sizeof(active_region[0]));
11560 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11561 sizeof(active_region) / sizeof(active_region[0]));
11562
11563 /* black level lock */
11564 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11565 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11566
Thierry Strudel3d639192016-09-09 11:52:26 -070011567 //special defaults for manual template
11568 if (type == CAMERA3_TEMPLATE_MANUAL) {
11569 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11570 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11571
11572 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11573 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11574
11575 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11576 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11577
11578 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11579 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11580
11581 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11582 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11583
11584 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11585 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11586 }
11587
11588
11589 /* TNR
11590 * This is where we decide for which templates TNR will be enabled.
11591 * TNR is enabled if either the preview or the video stream requires it.
11592 * This is not to be confused with per-stream linking; that decision is
11593 * still made per session and is handled as part of stream configuration.
11594 */
11595 uint8_t tnr_enable = 0;
11596
11597 if (m_bTnrPreview || m_bTnrVideo) {
11598
11599 switch (type) {
11600 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11601 tnr_enable = 1;
11602 break;
11603
11604 default:
11605 tnr_enable = 0;
11606 break;
11607 }
11608
11609 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11610 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11611 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11612
11613 LOGD("TNR:%d with process plate %d for template:%d",
11614 tnr_enable, tnr_process_type, type);
11615 }
11616
11617 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011618 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011619 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11620
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011621 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011622 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11623
Shuzhen Wang920ea402017-05-03 08:49:39 -070011624 uint8_t related_camera_id = mCameraId;
11625 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011626
11627 /* CDS default */
11628 char prop[PROPERTY_VALUE_MAX];
11629 memset(prop, 0, sizeof(prop));
11630 property_get("persist.camera.CDS", prop, "Auto");
11631 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11632 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11633 if (CAM_CDS_MODE_MAX == cds_mode) {
11634 cds_mode = CAM_CDS_MODE_AUTO;
11635 }
11636
11637 /* Disabling CDS in templates which have TNR enabled*/
11638 if (tnr_enable)
11639 cds_mode = CAM_CDS_MODE_OFF;
11640
11641 int32_t mode = cds_mode;
11642 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011643
Thierry Strudel269c81a2016-10-12 12:13:59 -070011644 /* Manual Convergence AEC Speed is disabled by default*/
11645 float default_aec_speed = 0;
11646 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11647
11648 /* Manual Convergence AWB Speed is disabled by default*/
11649 float default_awb_speed = 0;
11650 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11651
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011652 // Set instant AEC to normal convergence by default
11653 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11654 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11655
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011656 if (gExposeEnableZslKey) {
11657 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011658 int32_t postview = 0;
11659 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011660 int32_t continuousZslCapture = 0;
11661 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -070011662 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
11663 // CAMERA3_TEMPLATE_PREVIEW.
11664 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11665 type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011666 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11667
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011668 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11669 // hybrid ae is enabled for 3rd party app HDR+.
11670 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11671 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11672 hybrid_ae = 1;
11673 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011674 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011675 /* hybrid ae */
11676 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011677
Thierry Strudel3d639192016-09-09 11:52:26 -070011678 mDefaultMetadata[type] = settings.release();
11679
11680 return mDefaultMetadata[type];
11681}
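// Usage sketch (illustrative only, not called by this HAL): a client holding one
// of the cached templates above would typically clone it and override a few keys
// before building a capture request. buildPreviewSettings() and the fixed 30fps
// override are hypothetical; CameraMetadata and clone_camera_metadata() are the
// same helpers already used in this file.
static camera_metadata_t *buildPreviewSettings(const camera_metadata_t *defaultTemplate)
{
    CameraMetadata settings(clone_camera_metadata(defaultTemplate));
    int32_t fpsRange[2] = {30, 30};  // lock preview to a fixed 30fps range
    settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fpsRange, 2);
    return settings.release();
}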
11682
11683/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011684 * FUNCTION : getExpectedFrameDuration
11685 *
11686 * DESCRIPTION: Extract the maximum expected frame duration from either the
11687 *              exposure time or the frame duration setting
11688 *
11689 * PARAMETERS :
11690 * @request : request settings
11691 * @frameDuration : The maximum frame duration in nanoseconds
11692 *
11693 * RETURN : None
11694 *==========================================================================*/
11695void QCamera3HardwareInterface::getExpectedFrameDuration(
11696 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11697 if (nullptr == frameDuration) {
11698 return;
11699 }
11700
11701 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11702 find_camera_metadata_ro_entry(request,
11703 ANDROID_SENSOR_EXPOSURE_TIME,
11704 &e);
11705 if (e.count > 0) {
11706 *frameDuration = e.data.i64[0];
11707 }
11708 find_camera_metadata_ro_entry(request,
11709 ANDROID_SENSOR_FRAME_DURATION,
11710 &e);
11711 if (e.count > 0) {
11712 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11713 }
11714}
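// Worked example (hypothetical values): with ANDROID_SENSOR_EXPOSURE_TIME set to
// 50000000ns (50ms) and ANDROID_SENSOR_FRAME_DURATION set to 33333333ns (~33ms),
// the helper above returns max(50000000, 33333333) = 50000000ns, i.e. a long
// exposure dominates the expected frame duration.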
11715
11716/*===========================================================================
11717 * FUNCTION : calculateMaxExpectedDuration
11718 *
11719 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11720 * current camera settings.
11721 *
11722 * PARAMETERS :
11723 * @request : request settings
11724 *
11725 * RETURN : Expected frame duration in nanoseconds.
11726 *==========================================================================*/
11727nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11728 const camera_metadata_t *request) {
11729 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11730 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11731 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11732 if (e.count == 0) {
11733 return maxExpectedDuration;
11734 }
11735
11736 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11737 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11738 }
11739
11740 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11741 return maxExpectedDuration;
11742 }
11743
11744 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11745 if (e.count == 0) {
11746 return maxExpectedDuration;
11747 }
11748
11749 switch (e.data.u8[0]) {
11750 case ANDROID_CONTROL_AE_MODE_OFF:
11751 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11752 break;
11753 default:
11754 find_camera_metadata_ro_entry(request,
11755 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11756 &e);
11757 if (e.count > 1) {
11758 maxExpectedDuration = 1e9 / e.data.i32[0];
11759 }
11760 break;
11761 }
11762
11763 return maxExpectedDuration;
11764}
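// Minimal sketch of the same bound computed directly from an fps range (assumed
// helper, not used by the HAL). For an AE-controlled request with a target range
// of [15, 30] fps this yields 1e9 / 15 ≈ 66.7ms, matching the default branch of
// calculateMaxExpectedDuration() above.
static nsecs_t expectedDurationFromFpsRange(int32_t minFps)
{
    // Only the lower fps bound limits the frame duration; guard against zero.
    return (minFps > 0) ? (nsecs_t)(1e9 / minFps) : 0;
}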
11765
11766/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011767 * FUNCTION : setFrameParameters
11768 *
11769 * DESCRIPTION: set parameters per frame as requested in the metadata from
11770 * framework
11771 *
11772 * PARAMETERS :
11773 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011774 * @streamsArray : Stream IDs of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011775 * @blob_request: Whether this request is a blob request or not
11776 *
11777 * RETURN : success: NO_ERROR
11778 * failure:
11779 *==========================================================================*/
11780int QCamera3HardwareInterface::setFrameParameters(
11781 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011782 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011783 int blob_request,
11784 uint32_t snapshotStreamId)
11785{
11786 /*translate from camera_metadata_t type to parm_type_t*/
11787 int rc = 0;
11788 int32_t hal_version = CAM_HAL_V3;
11789
11790 clear_metadata_buffer(mParameters);
11791 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11792 LOGE("Failed to set hal version in the parameters");
11793 return BAD_VALUE;
11794 }
11795
11796 /*we need to update the frame number in the parameters*/
11797 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11798 request->frame_number)) {
11799 LOGE("Failed to set the frame number in the parameters");
11800 return BAD_VALUE;
11801 }
11802
11803 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011805 LOGE("Failed to set stream type mask in the parameters");
11806 return BAD_VALUE;
11807 }
11808
11809 if (mUpdateDebugLevel) {
11810 uint32_t dummyDebugLevel = 0;
11811 /* The value of dummyDebugLevel is irrelevant. On
11812 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL, the debug property is re-read */
11813 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11814 dummyDebugLevel)) {
11815 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11816 return BAD_VALUE;
11817 }
11818 mUpdateDebugLevel = false;
11819 }
11820
11821 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011822 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011823 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11824 if (blob_request)
11825 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11826 }
11827
11828 return rc;
11829}
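// Caller-side sketch (simplified and partly hypothetical; error handling and
// locking are trimmed). The expectation is that processCaptureRequest() first
// translates the per-frame settings via setFrameParameters() and only then
// pushes the batched parameter buffer to the backend, so that the frame number,
// stream mask and translated metadata travel together:
//
//     rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
//     if (rc == NO_ERROR) {
//         rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
//     }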
11830
11831/*===========================================================================
11832 * FUNCTION : setReprocParameters
11833 *
11834 * DESCRIPTION: Translate framework metadata to the HAL metadata structure, and
11835 * return it.
11836 *
11837 * PARAMETERS :
11838 * @request : request that needs to be serviced
11839 *
11840 * RETURN : success: NO_ERROR
11841 * failure:
11842 *==========================================================================*/
11843int32_t QCamera3HardwareInterface::setReprocParameters(
11844 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11845 uint32_t snapshotStreamId)
11846{
11847 /*translate from camera_metadata_t type to parm_type_t*/
11848 int rc = 0;
11849
11850 if (NULL == request->settings){
11851 LOGE("Reprocess settings cannot be NULL");
11852 return BAD_VALUE;
11853 }
11854
11855 if (NULL == reprocParam) {
11856 LOGE("Invalid reprocessing metadata buffer");
11857 return BAD_VALUE;
11858 }
11859 clear_metadata_buffer(reprocParam);
11860
11861 /*we need to update the frame number in the parameters*/
11862 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11863 request->frame_number)) {
11864 LOGE("Failed to set the frame number in the parameters");
11865 return BAD_VALUE;
11866 }
11867
11868 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11869 if (rc < 0) {
11870 LOGE("Failed to translate reproc request");
11871 return rc;
11872 }
11873
11874 CameraMetadata frame_settings;
11875 frame_settings = request->settings;
11876 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11877 frame_settings.exists(QCAMERA3_CROP_REPROCESS) && frame_settings.exists(QCAMERA3_CROP_ROI_MAP_REPROCESS)) {
11878 int32_t *crop_count =
11879 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11880 int32_t *crop_data =
11881 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11882 int32_t *roi_map =
11883 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11884 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11885 cam_crop_data_t crop_meta;
11886 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11887 crop_meta.num_of_streams = 1;
11888 crop_meta.crop_info[0].crop.left = crop_data[0];
11889 crop_meta.crop_info[0].crop.top = crop_data[1];
11890 crop_meta.crop_info[0].crop.width = crop_data[2];
11891 crop_meta.crop_info[0].crop.height = crop_data[3];
11892
11893 crop_meta.crop_info[0].roi_map.left =
11894 roi_map[0];
11895 crop_meta.crop_info[0].roi_map.top =
11896 roi_map[1];
11897 crop_meta.crop_info[0].roi_map.width =
11898 roi_map[2];
11899 crop_meta.crop_info[0].roi_map.height =
11900 roi_map[3];
11901
11902 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11903 rc = BAD_VALUE;
11904 }
11905 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11906 request->input_buffer->stream,
11907 crop_meta.crop_info[0].crop.left,
11908 crop_meta.crop_info[0].crop.top,
11909 crop_meta.crop_info[0].crop.width,
11910 crop_meta.crop_info[0].crop.height);
11911 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11912 request->input_buffer->stream,
11913 crop_meta.crop_info[0].roi_map.left,
11914 crop_meta.crop_info[0].roi_map.top,
11915 crop_meta.crop_info[0].roi_map.width,
11916 crop_meta.crop_info[0].roi_map.height);
11917 } else {
11918 LOGE("Invalid reprocess crop count %d!", *crop_count);
11919 }
11920 } else {
11921 LOGE("No crop data from matching output stream");
11922 }
11923
11924 /* These settings are not needed for regular requests, so handle them specially for
11925 reprocess requests; this information is needed for the EXIF tags */
11926 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11927 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11928 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11929 if (NAME_NOT_FOUND != val) {
11930 uint32_t flashMode = (uint32_t)val;
11931 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11932 rc = BAD_VALUE;
11933 }
11934 } else {
11935 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11936 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11937 }
11938 } else {
11939 LOGH("No flash mode in reprocess settings");
11940 }
11941
11942 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11943 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11944 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11945 rc = BAD_VALUE;
11946 }
11947 } else {
11948 LOGH("No flash state in reprocess settings");
11949 }
11950
11951 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11952 uint8_t *reprocessFlags =
11953 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11955 *reprocessFlags)) {
11956 rc = BAD_VALUE;
11957 }
11958 }
11959
Thierry Strudel54dc9782017-02-15 12:12:10 -080011960 // Add exif debug data to internal metadata
11961 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11962 mm_jpeg_debug_exif_params_t *debug_params =
11963 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11964 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11965 // AE
11966 if (debug_params->ae_debug_params_valid == TRUE) {
11967 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11968 debug_params->ae_debug_params);
11969 }
11970 // AWB
11971 if (debug_params->awb_debug_params_valid == TRUE) {
11972 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11973 debug_params->awb_debug_params);
11974 }
11975 // AF
11976 if (debug_params->af_debug_params_valid == TRUE) {
11977 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11978 debug_params->af_debug_params);
11979 }
11980 // ASD
11981 if (debug_params->asd_debug_params_valid == TRUE) {
11982 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11983 debug_params->asd_debug_params);
11984 }
11985 // Stats
11986 if (debug_params->stats_debug_params_valid == TRUE) {
11987 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11988 debug_params->stats_debug_params);
11989 }
11990 // BE Stats
11991 if (debug_params->bestats_debug_params_valid == TRUE) {
11992 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11993 debug_params->bestats_debug_params);
11994 }
11995 // BHIST
11996 if (debug_params->bhist_debug_params_valid == TRUE) {
11997 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11998 debug_params->bhist_debug_params);
11999 }
12000 // 3A Tuning
12001 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
12002 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
12003 debug_params->q3a_tuning_debug_params);
12004 }
12005 }
12006
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012007 // Add metadata which reprocess needs
12008 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
12009 cam_reprocess_info_t *repro_info =
12010 (cam_reprocess_info_t *)frame_settings.find
12011 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070012012 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012013 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012014 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012015 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012016 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012017 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012018 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012019 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012020 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012021 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070012022 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012023 repro_info->pipeline_flip);
12024 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
12025 repro_info->af_roi);
12026 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
12027 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070012028 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
12029 CAM_INTF_PARM_ROTATION metadata has already been added in
12030 translateToHalMetadata and HAL needs to keep this new rotation
12031 metadata. Otherwise, the old rotation info saved in the vendor tag
12032 is used */
12033 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12034 CAM_INTF_PARM_ROTATION, reprocParam) {
12035 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12036 } else {
12037 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012038 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012039 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012040 }
12041
12042 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
12043 to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
12044 roi.width and roi.height become the final JPEG size.
12045 For now, HAL only checks this for reprocess requests */
12046 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12047 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12048 uint8_t *enable =
12049 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12050 if (*enable == TRUE) {
12051 int32_t *crop_data =
12052 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12053 cam_stream_crop_info_t crop_meta;
12054 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12055 crop_meta.stream_id = 0;
12056 crop_meta.crop.left = crop_data[0];
12057 crop_meta.crop.top = crop_data[1];
12058 crop_meta.crop.width = crop_data[2];
12059 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012060 // The JPEG crop roi should match cpp output size
12061 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12062 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12063 crop_meta.roi_map.left = 0;
12064 crop_meta.roi_map.top = 0;
12065 crop_meta.roi_map.width = cpp_crop->crop.width;
12066 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070012067 }
12068 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12069 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012070 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012071 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012072 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12073 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012074 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012075 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12076
12077 // Add JPEG scale information
12078 cam_dimension_t scale_dim;
12079 memset(&scale_dim, 0, sizeof(cam_dimension_t));
12080 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12081 int32_t *roi =
12082 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12083 scale_dim.width = roi[2];
12084 scale_dim.height = roi[3];
12085 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12086 scale_dim);
12087 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12088 scale_dim.width, scale_dim.height, mCameraId);
12089 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012090 }
12091 }
12092
12093 return rc;
12094}
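// Sketch of the reprocess-side inputs consumed above (illustrative only; the
// helper name and crop values are hypothetical). An application enables HW JPEG
// crop/scale on a reprocess request by setting the vendor tags below, where
// roi[2]/roi[3] become the final JPEG dimensions.
static void addJpegEncodeCrop(CameraMetadata &reprocSettings)
{
    uint8_t cropEnable = 1;
    int32_t cropRect[4] = {0, 0, 1920, 1080};  // left, top, width, height to crop
    int32_t scaleRoi[4] = {0, 0, 1280, 720};   // output (downscaled) JPEG size
    reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &cropEnable, 1);
    reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
    reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, scaleRoi, 4);
}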
12095
12096/*===========================================================================
12097 * FUNCTION : saveRequestSettings
12098 *
12099 * DESCRIPTION: Add any settings that might have changed to the request settings
12100 * and save the settings to be applied on the frame
12101 *
12102 * PARAMETERS :
12103 * @jpegMetadata : the extracted and/or modified jpeg metadata
12104 * @request : request with initial settings
12105 *
12106 * RETURN :
12107 * camera_metadata_t* : pointer to the saved request settings
12108 *==========================================================================*/
12109camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12110 const CameraMetadata &jpegMetadata,
12111 camera3_capture_request_t *request)
12112{
12113 camera_metadata_t *resultMetadata;
12114 CameraMetadata camMetadata;
12115 camMetadata = request->settings;
12116
12117 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12118 int32_t thumbnail_size[2];
12119 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12120 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12121 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12122 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12123 }
12124
12125 if (request->input_buffer != NULL) {
12126 uint8_t reprocessFlags = 1;
12127 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12128 (uint8_t*)&reprocessFlags,
12129 sizeof(reprocessFlags));
12130 }
12131
12132 resultMetadata = camMetadata.release();
12133 return resultMetadata;
12134}
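// Round-trip sketch (illustrative; the helper name is hypothetical): the
// reprocess flag stamped above for requests with an input buffer is what
// setReprocParameters() later reads back from the saved settings.
static bool isSavedRequestReprocess(const CameraMetadata &savedSettings)
{
    camera_metadata_ro_entry_t entry =
            savedSettings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS);
    return (entry.count > 0) && (entry.data.u8[0] != 0);
}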
12135
12136/*===========================================================================
12137 * FUNCTION : setHalFpsRange
12138 *
12139 * DESCRIPTION: set FPS range parameter
12140 *
12141 *
12142 * PARAMETERS :
12143 * @settings : Metadata from framework
12144 * @hal_metadata: Metadata buffer
12145 *
12146 *
12147 * RETURN : success: NO_ERROR
12148 * failure:
12149 *==========================================================================*/
12150int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12151 metadata_buffer_t *hal_metadata)
12152{
12153 int32_t rc = NO_ERROR;
12154 cam_fps_range_t fps_range;
12155 fps_range.min_fps = (float)
12156 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12157 fps_range.max_fps = (float)
12158 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12159 fps_range.video_min_fps = fps_range.min_fps;
12160 fps_range.video_max_fps = fps_range.max_fps;
12161
12162 LOGD("aeTargetFpsRange fps: [%f %f]",
12163 fps_range.min_fps, fps_range.max_fps);
12164 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12165 * follows:
12166 * ---------------------------------------------------------------|
12167 * Video stream is absent in configure_streams |
12168 * (Camcorder preview before the first video record |
12169 * ---------------------------------------------------------------|
12170 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12171 * | | | vid_min/max_fps|
12172 * ---------------------------------------------------------------|
12173 * NO | [ 30, 240] | 240 | [240, 240] |
12174 * |-------------|-------------|----------------|
12175 * | [240, 240] | 240 | [240, 240] |
12176 * ---------------------------------------------------------------|
12177 * Video stream is present in configure_streams |
12178 * ---------------------------------------------------------------|
12179 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12180 * | | | vid_min/max_fps|
12181 * ---------------------------------------------------------------|
12182 * NO | [ 30, 240] | 240 | [240, 240] |
12183 * (camcorder prev |-------------|-------------|----------------|
12184 * after video rec | [240, 240] | 240 | [240, 240] |
12185 * is stopped) | | | |
12186 * ---------------------------------------------------------------|
12187 * YES | [ 30, 240] | 240 | [240, 240] |
12188 * |-------------|-------------|----------------|
12189 * | [240, 240] | 240 | [240, 240] |
12190 * ---------------------------------------------------------------|
12191 * When Video stream is absent in configure_streams,
12192 * preview fps = sensor_fps / batchsize
12193 * Eg: for 240fps at batchSize 4, preview = 60fps
12194 * for 120fps at batchSize 4, preview = 30fps
12195 *
12196 * When video stream is present in configure_streams, preview fps is as per
12197 * the ratio of preview buffers to video buffers requested in process
12198 * capture request
12199 */
12200 mBatchSize = 0;
12201 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12202 fps_range.min_fps = fps_range.video_max_fps;
12203 fps_range.video_min_fps = fps_range.video_max_fps;
12204 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12205 fps_range.max_fps);
12206 if (NAME_NOT_FOUND != val) {
12207 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12208 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12209 return BAD_VALUE;
12210 }
12211
12212 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12213 /* If batchmode is currently in progress and the fps changes,
12214 * set the flag to restart the sensor */
12215 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12216 (mHFRVideoFps != fps_range.max_fps)) {
12217 mNeedSensorRestart = true;
12218 }
12219 mHFRVideoFps = fps_range.max_fps;
12220 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12221 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12222 mBatchSize = MAX_HFR_BATCH_SIZE;
12223 }
12224 }
12225 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12226
12227 }
12228 } else {
12229 /* HFR mode is a session param in the backend/ISP. It should be reset when
12230 * in non-HFR mode */
12231 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12232 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12233 return BAD_VALUE;
12234 }
12235 }
12236 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12237 return BAD_VALUE;
12238 }
12239 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12240 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12241 return rc;
12242}
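// Batch-size sketch (illustrative; mirrors the HFR branch above rather than
// replacing it). For a constrained high-speed session at 240fps with
// PREVIEW_FPS_FOR_HFR at 30, this gives 240 / 30 = 8 frames per preview frame,
// subject to the MAX_HFR_BATCH_SIZE clamp.
static uint32_t hfrBatchSize(float hfrVideoFps)
{
    uint32_t batch = (uint32_t)(hfrVideoFps / PREVIEW_FPS_FOR_HFR);
    return (batch > MAX_HFR_BATCH_SIZE) ? MAX_HFR_BATCH_SIZE : batch;
}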
12243
12244/*===========================================================================
12245 * FUNCTION : translateToHalMetadata
12246 *
12247 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12248 *
12249 *
12250 * PARAMETERS :
12251 * @request : request sent from framework
12252 *
12253 *
12254 * RETURN : success: NO_ERROR
12255 * failure:
12256 *==========================================================================*/
12257int QCamera3HardwareInterface::translateToHalMetadata
12258 (const camera3_capture_request_t *request,
12259 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012260 uint32_t snapshotStreamId) {
12261 if (request == nullptr || hal_metadata == nullptr) {
12262 return BAD_VALUE;
12263 }
12264
12265 int64_t minFrameDuration = getMinFrameDuration(request);
12266
12267 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12268 minFrameDuration);
12269}
12270
12271int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12272 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12273 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12274
Thierry Strudel3d639192016-09-09 11:52:26 -070012275 int rc = 0;
12276 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012277 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012278
12279 /* Do not change the order of the following list unless you know what you are
12280 * doing.
12281 * The order is laid out in such a way that parameters in the front of the table
12282 * may be used to override the parameters later in the table. Examples are:
12283 * 1. META_MODE should precede AEC/AWB/AF MODE
12284 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12285 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12286 * 4. Any mode should precede its corresponding settings
12287 */
12288 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12289 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12290 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12291 rc = BAD_VALUE;
12292 }
12293 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12294 if (rc != NO_ERROR) {
12295 LOGE("extractSceneMode failed");
12296 }
12297 }
12298
12299 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12300 uint8_t fwk_aeMode =
12301 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12302 uint8_t aeMode;
12303 int32_t redeye;
12304
12305 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12306 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012307 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12308 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012309 } else {
12310 aeMode = CAM_AE_MODE_ON;
12311 }
12312 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12313 redeye = 1;
12314 } else {
12315 redeye = 0;
12316 }
12317
12318 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12319 fwk_aeMode);
12320 if (NAME_NOT_FOUND != val) {
12321 int32_t flashMode = (int32_t)val;
12322 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12323 }
12324
12325 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12326 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12327 rc = BAD_VALUE;
12328 }
12329 }
12330
12331 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12332 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12333 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12334 fwk_whiteLevel);
12335 if (NAME_NOT_FOUND != val) {
12336 uint8_t whiteLevel = (uint8_t)val;
12337 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12338 rc = BAD_VALUE;
12339 }
12340 }
12341 }
12342
12343 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12344 uint8_t fwk_cacMode =
12345 frame_settings.find(
12346 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12347 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12348 fwk_cacMode);
12349 if (NAME_NOT_FOUND != val) {
12350 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12351 bool entryAvailable = FALSE;
12352 // Check whether Frameworks set CAC mode is supported in device or not
12353 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12354 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12355 entryAvailable = TRUE;
12356 break;
12357 }
12358 }
12359 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12360 // If the entry is not found, set a device-supported mode instead of the framework's mode, i.e.:
12361 // Only HW ISP CAC + no SW CAC : advertise all 3, with High doing the same as Fast in the ISP
12362 // No HW ISP CAC + only SW CAC : advertise all 3, with Fast doing the same as OFF
12363 if (entryAvailable == FALSE) {
12364 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12365 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12366 } else {
12367 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12368 // High is not supported, so set FAST since the spec says the underlying
12369 // device implementation can be the same for both modes.
12370 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12371 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12372 // Fast is not supported, so we cannot set HIGH or FAST; choose OFF
12373 // to avoid the fps drop that high quality would cause
12374 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12375 } else {
12376 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12377 }
12378 }
12379 }
12380 LOGD("Final cacMode is %d", cacMode);
12381 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12382 rc = BAD_VALUE;
12383 }
12384 } else {
12385 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12386 }
12387 }
12388
Jason Lee84ae9972017-02-24 13:24:24 -080012389 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012390 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012391 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012392 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012393 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12394 fwk_focusMode);
12395 if (NAME_NOT_FOUND != val) {
12396 uint8_t focusMode = (uint8_t)val;
12397 LOGD("set focus mode %d", focusMode);
12398 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12399 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12400 rc = BAD_VALUE;
12401 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012402 }
Shuzhen Wanga1d82a92017-09-19 14:39:43 -070012403 } else {
12404 LOGE("Fatal: Missing ANDROID_CONTROL_AF_MODE");
Thierry Strudel3d639192016-09-09 11:52:26 -070012405 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012406 } else {
12407 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12408 LOGE("Focus forced to infinity %d", focusMode);
12409 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12410 rc = BAD_VALUE;
12411 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012412 }
12413
Jason Lee84ae9972017-02-24 13:24:24 -080012414 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12415 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012416 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12417 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12418 focalDistance)) {
12419 rc = BAD_VALUE;
12420 }
12421 }
12422
12423 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12424 uint8_t fwk_antibandingMode =
12425 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12426 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12427 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12428 if (NAME_NOT_FOUND != val) {
12429 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012430 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12431 if (m60HzZone) {
12432 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12433 } else {
12434 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12435 }
12436 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012437 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12438 hal_antibandingMode)) {
12439 rc = BAD_VALUE;
12440 }
12441 }
12442 }
12443
12444 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12445 int32_t expCompensation = frame_settings.find(
12446 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12447 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12448 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12449 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12450 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012451 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012452 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12453 expCompensation)) {
12454 rc = BAD_VALUE;
12455 }
12456 }
12457
12458 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12459 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12460 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12461 rc = BAD_VALUE;
12462 }
12463 }
12464 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12465 rc = setHalFpsRange(frame_settings, hal_metadata);
12466 if (rc != NO_ERROR) {
12467 LOGE("setHalFpsRange failed");
12468 }
12469 }
12470
12471 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12472 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12473 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12474 rc = BAD_VALUE;
12475 }
12476 }
12477
12478 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12479 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12480 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12481 fwk_effectMode);
12482 if (NAME_NOT_FOUND != val) {
12483 uint8_t effectMode = (uint8_t)val;
12484 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12485 rc = BAD_VALUE;
12486 }
12487 }
12488 }
12489
12490 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12491 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12492 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12493 colorCorrectMode)) {
12494 rc = BAD_VALUE;
12495 }
12496 }
12497
12498 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12499 cam_color_correct_gains_t colorCorrectGains;
12500 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12501 colorCorrectGains.gains[i] =
12502 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12503 }
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12505 colorCorrectGains)) {
12506 rc = BAD_VALUE;
12507 }
12508 }
12509
12510 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12511 cam_color_correct_matrix_t colorCorrectTransform;
12512 cam_rational_type_t transform_elem;
12513 size_t num = 0;
12514 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12515 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12516 transform_elem.numerator =
12517 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12518 transform_elem.denominator =
12519 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12520 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12521 num++;
12522 }
12523 }
12524 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12525 colorCorrectTransform)) {
12526 rc = BAD_VALUE;
12527 }
12528 }
12529
12530 cam_trigger_t aecTrigger;
12531 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12532 aecTrigger.trigger_id = -1;
12533 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12534 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12535 aecTrigger.trigger =
12536 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12537 aecTrigger.trigger_id =
12538 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12540 aecTrigger)) {
12541 rc = BAD_VALUE;
12542 }
12543 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12544 aecTrigger.trigger, aecTrigger.trigger_id);
12545 }
12546
12547 /*af_trigger must come with a trigger id*/
12548 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12549 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12550 cam_trigger_t af_trigger;
12551 af_trigger.trigger =
12552 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12553 af_trigger.trigger_id =
12554 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12555 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12556 rc = BAD_VALUE;
12557 }
12558 LOGD("AfTrigger: %d AfTriggerID: %d",
12559 af_trigger.trigger, af_trigger.trigger_id);
12560 }
12561
12562 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12563 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12564 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12565 rc = BAD_VALUE;
12566 }
12567 }
12568 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12569 cam_edge_application_t edge_application;
12570 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012571
Thierry Strudel3d639192016-09-09 11:52:26 -070012572 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12573 edge_application.sharpness = 0;
12574 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012575 edge_application.sharpness =
12576 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12577 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12578 int32_t sharpness =
12579 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12580 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12581 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12582 LOGD("Setting edge mode sharpness %d", sharpness);
12583 edge_application.sharpness = sharpness;
12584 }
12585 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012586 }
12587 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12588 rc = BAD_VALUE;
12589 }
12590 }
12591
12592 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12593 int32_t respectFlashMode = 1;
12594 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12595 uint8_t fwk_aeMode =
12596 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012597 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12598 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12599 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012600 respectFlashMode = 0;
12601 LOGH("AE Mode controls flash, ignore android.flash.mode");
12602 }
12603 }
12604 if (respectFlashMode) {
12605 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12606 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12607 LOGH("flash mode after mapping %d", val);
12608 // To check: CAM_INTF_META_FLASH_MODE usage
12609 if (NAME_NOT_FOUND != val) {
12610 uint8_t flashMode = (uint8_t)val;
12611 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12612 rc = BAD_VALUE;
12613 }
12614 }
12615 }
12616 }
12617
12618 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12619 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12620 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12621 rc = BAD_VALUE;
12622 }
12623 }
12624
12625 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12626 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12627 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12628 flashFiringTime)) {
12629 rc = BAD_VALUE;
12630 }
12631 }
12632
12633 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12634 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12635 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12636 hotPixelMode)) {
12637 rc = BAD_VALUE;
12638 }
12639 }
12640
12641 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12642 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12643 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12644 lensAperture)) {
12645 rc = BAD_VALUE;
12646 }
12647 }
12648
12649 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12650 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12651 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12652 filterDensity)) {
12653 rc = BAD_VALUE;
12654 }
12655 }
12656
12657 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12658 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12659 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12660 focalLength)) {
12661 rc = BAD_VALUE;
12662 }
12663 }
12664
12665 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12666 uint8_t optStabMode =
12667 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12668 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12669 optStabMode)) {
12670 rc = BAD_VALUE;
12671 }
12672 }
12673
12674 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12675 uint8_t videoStabMode =
12676 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12677 LOGD("videoStabMode from APP = %d", videoStabMode);
12678 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
12679 videoStabMode)) {
12680 rc = BAD_VALUE;
12681 }
12682 }
12683
12684
12685 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12686 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12687 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12688 noiseRedMode)) {
12689 rc = BAD_VALUE;
12690 }
12691 }
12692
12693 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12694 float reprocessEffectiveExposureFactor =
12695 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12696 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12697 reprocessEffectiveExposureFactor)) {
12698 rc = BAD_VALUE;
12699 }
12700 }
12701
12702 cam_crop_region_t scalerCropRegion;
12703 bool scalerCropSet = false;
12704 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12705 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12706 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12707 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12708 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12709
12710 // Map coordinate system from active array to sensor output.
12711 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12712 scalerCropRegion.width, scalerCropRegion.height);
12713
12714 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12715 scalerCropRegion)) {
12716 rc = BAD_VALUE;
12717 }
12718 scalerCropSet = true;
12719 }
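    // Added note (illustrative, not from the original source): mCropRegionMapper.toSensor()
    // rescales the framework crop rectangle from active-array coordinates into the current
    // sensor output. Assuming a hypothetical 4032x3024 active array running in a 2016x1512
    // sensor mode, a requested crop of (left,top,w,h) = (1008, 756, 2016, 1512) would map to
    // approximately (504, 378, 1008, 756) before being batched as CAM_INTF_META_SCALER_CROP_REGION.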
12720
12721 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12722 int64_t sensorExpTime =
12723 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12724 LOGD("setting sensorExpTime %lld", sensorExpTime);
12725 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12726 sensorExpTime)) {
12727 rc = BAD_VALUE;
12728 }
12729 }
12730
12731 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12732 int64_t sensorFrameDuration =
12733 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012734 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12735 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12736 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12737 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12738 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12739 sensorFrameDuration)) {
12740 rc = BAD_VALUE;
12741 }
12742 }
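    // Added note (illustrative, not from the original source): the clamp above keeps the request
    // inside the sensor's supported range. For example, a request of 10000000 ns (10 ms) with a
    // stream-imposed minFrameDuration of 33333333 ns (~30 fps) is raised to 33333333 ns, while a
    // request longer than max_frame_duration is lowered to that limit.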
12743
12744 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12745 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12746 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12747 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12748 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12749 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12750 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12751 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12752 sensorSensitivity)) {
12753 rc = BAD_VALUE;
12754 }
12755 }
12756
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012757#ifndef USE_HAL_3_3
12758 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12759 int32_t ispSensitivity =
12760 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12761 if (ispSensitivity <
12762 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12763 ispSensitivity =
12764 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12765 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12766 }
12767 if (ispSensitivity >
12768 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12769 ispSensitivity =
12770 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12771 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12772 }
12773 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12774 ispSensitivity)) {
12775 rc = BAD_VALUE;
12776 }
12777 }
12778#endif
12779
Thierry Strudel3d639192016-09-09 11:52:26 -070012780 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12781 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12782 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12783 rc = BAD_VALUE;
12784 }
12785 }
12786
12787 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12788 uint8_t fwk_facedetectMode =
12789 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12790
12791 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12792 fwk_facedetectMode);
12793
12794 if (NAME_NOT_FOUND != val) {
12795 uint8_t facedetectMode = (uint8_t)val;
12796 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12797 facedetectMode)) {
12798 rc = BAD_VALUE;
12799 }
12800 }
12801 }
12802
Thierry Strudel54dc9782017-02-15 12:12:10 -080012803 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012804 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012805 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012806 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12807 histogramMode)) {
12808 rc = BAD_VALUE;
12809 }
12810 }
12811
12812 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12813 uint8_t sharpnessMapMode =
12814 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12815 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12816 sharpnessMapMode)) {
12817 rc = BAD_VALUE;
12818 }
12819 }
12820
12821 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12822 uint8_t tonemapMode =
12823 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12824 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12825 rc = BAD_VALUE;
12826 }
12827 }
12828 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12829 /*All tonemap channels will have the same number of points*/
12830 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12831 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12832 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12833 cam_rgb_tonemap_curves tonemapCurves;
12834 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12835 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12836 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12837 tonemapCurves.tonemap_points_cnt,
12838 CAM_MAX_TONEMAP_CURVE_SIZE);
12839 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12840 }
12841
12842 /* ch0 = G*/
12843 size_t point = 0;
12844 cam_tonemap_curve_t tonemapCurveGreen;
12845 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12846 for (size_t j = 0; j < 2; j++) {
12847 tonemapCurveGreen.tonemap_points[i][j] =
12848 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12849 point++;
12850 }
12851 }
12852 tonemapCurves.curves[0] = tonemapCurveGreen;
12853
12854 /* ch 1 = B */
12855 point = 0;
12856 cam_tonemap_curve_t tonemapCurveBlue;
12857 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12858 for (size_t j = 0; j < 2; j++) {
12859 tonemapCurveBlue.tonemap_points[i][j] =
12860 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12861 point++;
12862 }
12863 }
12864 tonemapCurves.curves[1] = tonemapCurveBlue;
12865
12866 /* ch 2 = R */
12867 point = 0;
12868 cam_tonemap_curve_t tonemapCurveRed;
12869 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12870 for (size_t j = 0; j < 2; j++) {
12871 tonemapCurveRed.tonemap_points[i][j] =
12872 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12873 point++;
12874 }
12875 }
12876 tonemapCurves.curves[2] = tonemapCurveRed;
12877
12878 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12879 tonemapCurves)) {
12880 rc = BAD_VALUE;
12881 }
12882 }
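    // Added note (illustrative, not from the original source): each ANDROID_TONEMAP_CURVE_* entry
    // is a flattened list of (Pin, Pout) pairs, which is why count/2 is used as the point count
    // above. For example, a three-point green curve {0.0f,0.0f, 0.5f,0.7f, 1.0f,1.0f} becomes
    // tonemap_points[0..2] = {0,0}, {0.5,0.7}, {1,1} in curves[0].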
12883
12884 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12885 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12886 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12887 captureIntent)) {
12888 rc = BAD_VALUE;
12889 }
12890 }
12891
12892 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12893 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12894 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12895 blackLevelLock)) {
12896 rc = BAD_VALUE;
12897 }
12898 }
12899
12900 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12901 uint8_t lensShadingMapMode =
12902 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12904 lensShadingMapMode)) {
12905 rc = BAD_VALUE;
12906 }
12907 }
12908
12909 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12910 cam_area_t roi;
12911 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012912 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012913
12914 // Map coordinate system from active array to sensor output.
12915 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12916 roi.rect.height);
12917
12918 if (scalerCropSet) {
12919 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12920 }
12921 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12922 rc = BAD_VALUE;
12923 }
12924 }
12925
12926 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12927 cam_area_t roi;
12928 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012929 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012930
12931 // Map coordinate system from active array to sensor output.
12932 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12933 roi.rect.height);
12934
12935 if (scalerCropSet) {
12936 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12937 }
12938 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12939 rc = BAD_VALUE;
12940 }
12941 }
12942
12943 // CDS for non-HFR non-video mode
12944 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12945 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12946 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12947 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12948 LOGE("Invalid CDS mode %d!", *fwk_cds);
12949 } else {
12950 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12951 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12952 rc = BAD_VALUE;
12953 }
12954 }
12955 }
12956
Thierry Strudel04e026f2016-10-10 11:27:36 -070012957 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012958 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012959 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012960 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12961 }
12962 if (m_bVideoHdrEnabled)
12963 vhdr = CAM_VIDEO_HDR_MODE_ON;
12964
Thierry Strudel54dc9782017-02-15 12:12:10 -080012965 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12966
12967 if(vhdr != curr_hdr_state)
12968 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12969
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012970 rc = setVideoHdrMode(mParameters, vhdr);
12971 if (rc != NO_ERROR) {
12972        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012973 }
12974
12975 //IR
12976 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12977 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12978 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012979 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12980 uint8_t isIRon = 0;
12981
12982        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012983 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12984 LOGE("Invalid IR mode %d!", fwk_ir);
12985 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012986 if(isIRon != curr_ir_state )
12987 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12988
Thierry Strudel04e026f2016-10-10 11:27:36 -070012989 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12990 CAM_INTF_META_IR_MODE, fwk_ir)) {
12991 rc = BAD_VALUE;
12992 }
12993 }
12994 }
12995
Thierry Strudel54dc9782017-02-15 12:12:10 -080012996 //Binning Correction Mode
12997 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12998 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12999 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
13000 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
13001 || (0 > fwk_binning_correction)) {
13002 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
13003 } else {
13004 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13005 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
13006 rc = BAD_VALUE;
13007 }
13008 }
13009 }
13010
Thierry Strudel269c81a2016-10-12 12:13:59 -070013011 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
13012 float aec_speed;
13013 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
13014 LOGD("AEC Speed :%f", aec_speed);
13015 if ( aec_speed < 0 ) {
13016            LOGE("Invalid AEC convergence speed %f!", aec_speed);
13017 } else {
13018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
13019 aec_speed)) {
13020 rc = BAD_VALUE;
13021 }
13022 }
13023 }
13024
13025 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
13026 float awb_speed;
13027 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
13028 LOGD("AWB Speed :%f", awb_speed);
13029 if ( awb_speed < 0 ) {
13030            LOGE("Invalid AWB convergence speed %f!", awb_speed);
13031 } else {
13032 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
13033 awb_speed)) {
13034 rc = BAD_VALUE;
13035 }
13036 }
13037 }
13038
Thierry Strudel3d639192016-09-09 11:52:26 -070013039 // TNR
13040 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
13041 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
13042 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013043 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070013044 cam_denoise_param_t tnr;
13045 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
13046 tnr.process_plates =
13047 (cam_denoise_process_type_t)frame_settings.find(
13048 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13049 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013050
13051 if(b_TnrRequested != curr_tnr_state)
13052 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13053
Thierry Strudel3d639192016-09-09 11:52:26 -070013054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13055 rc = BAD_VALUE;
13056 }
13057 }
13058
Thierry Strudel54dc9782017-02-15 12:12:10 -080013059 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013060 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080013061 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013062 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13063 *exposure_metering_mode)) {
13064 rc = BAD_VALUE;
13065 }
13066 }
13067
Thierry Strudel3d639192016-09-09 11:52:26 -070013068 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13069 int32_t fwk_testPatternMode =
13070 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13071 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13072 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13073
13074 if (NAME_NOT_FOUND != testPatternMode) {
13075 cam_test_pattern_data_t testPatternData;
13076 memset(&testPatternData, 0, sizeof(testPatternData));
13077 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13078 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13079 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13080 int32_t *fwk_testPatternData =
13081 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13082 testPatternData.r = fwk_testPatternData[0];
13083 testPatternData.b = fwk_testPatternData[3];
13084 switch (gCamCapability[mCameraId]->color_arrangement) {
13085 case CAM_FILTER_ARRANGEMENT_RGGB:
13086 case CAM_FILTER_ARRANGEMENT_GRBG:
13087 testPatternData.gr = fwk_testPatternData[1];
13088 testPatternData.gb = fwk_testPatternData[2];
13089 break;
13090 case CAM_FILTER_ARRANGEMENT_GBRG:
13091 case CAM_FILTER_ARRANGEMENT_BGGR:
13092 testPatternData.gr = fwk_testPatternData[2];
13093 testPatternData.gb = fwk_testPatternData[1];
13094 break;
13095 default:
13096 LOGE("color arrangement %d is not supported",
13097 gCamCapability[mCameraId]->color_arrangement);
13098 break;
13099 }
13100 }
13101 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13102 testPatternData)) {
13103 rc = BAD_VALUE;
13104 }
13105 } else {
13106 LOGE("Invalid framework sensor test pattern mode %d",
13107 fwk_testPatternMode);
13108 }
13109 }
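    // Added note (not from the original source): ANDROID_SENSOR_TEST_PATTERN_DATA carries the
    // solid-color values in [R, Geven, Godd, B] order per the metadata definition, which is why
    // index 0 maps to r, index 3 to b, and the two green samples are swapped between gr/gb
    // according to the sensor's Bayer arrangement above.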
13110
13111 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13112 size_t count = 0;
13113 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13114 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13115 gps_coords.data.d, gps_coords.count, count);
13116 if (gps_coords.count != count) {
13117 rc = BAD_VALUE;
13118 }
13119 }
13120
13121 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13122 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13123 size_t count = 0;
13124 const char *gps_methods_src = (const char *)
13125 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13126 memset(gps_methods, '\0', sizeof(gps_methods));
13127 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13128 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13129 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13130 if (GPS_PROCESSING_METHOD_SIZE != count) {
13131 rc = BAD_VALUE;
13132 }
13133 }
13134
13135 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13136 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13137 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13138 gps_timestamp)) {
13139 rc = BAD_VALUE;
13140 }
13141 }
13142
13143 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13144 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13145 cam_rotation_info_t rotation_info;
13146 if (orientation == 0) {
13147 rotation_info.rotation = ROTATE_0;
13148 } else if (orientation == 90) {
13149 rotation_info.rotation = ROTATE_90;
13150 } else if (orientation == 180) {
13151 rotation_info.rotation = ROTATE_180;
13152 } else if (orientation == 270) {
13153 rotation_info.rotation = ROTATE_270;
13154 }
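        // Added note (not from the original source): the framework restricts
        // ANDROID_JPEG_ORIENTATION to multiples of 90 in [0, 270], so the chain above covers
        // every legal value; any other value would leave rotation_info.rotation unset.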
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013155 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013156 rotation_info.streamId = snapshotStreamId;
13157 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13158 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13159 rc = BAD_VALUE;
13160 }
13161 }
13162
13163 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13164 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13165 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13166 rc = BAD_VALUE;
13167 }
13168 }
13169
13170 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13171 uint32_t thumb_quality = (uint32_t)
13172 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13173 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13174 thumb_quality)) {
13175 rc = BAD_VALUE;
13176 }
13177 }
13178
13179 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13180 cam_dimension_t dim;
13181 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13182 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13183 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13184 rc = BAD_VALUE;
13185 }
13186 }
13187
13188 // Internal metadata
13189 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13190 size_t count = 0;
13191 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13192 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13193 privatedata.data.i32, privatedata.count, count);
13194 if (privatedata.count != count) {
13195 rc = BAD_VALUE;
13196 }
13197 }
13198
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013199 // ISO/Exposure Priority
13200 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13201 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13202 cam_priority_mode_t mode =
13203 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13204 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13205 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13206 use_iso_exp_pty.previewOnly = FALSE;
13207 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13208 use_iso_exp_pty.value = *ptr;
13209
13210 if(CAM_ISO_PRIORITY == mode) {
13211 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13212 use_iso_exp_pty)) {
13213 rc = BAD_VALUE;
13214 }
13215 }
13216 else {
13217 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13218 use_iso_exp_pty)) {
13219 rc = BAD_VALUE;
13220 }
13221 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013222
13223 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13224 rc = BAD_VALUE;
13225 }
13226 }
13227 } else {
13228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13229 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013230 }
13231 }
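    // Added note (illustrative, not from the original source): to request ISO priority an app
    // sets QCAMERA3_SELECT_PRIORITY to CAM_ISO_PRIORITY and places the desired ISO value in
    // QCAMERA3_USE_ISO_EXP_PRIORITY (int64); exposure-time priority works the same way with
    // CAM_EXP_PRIORITY and an exposure value. ZSL is forced on while either priority mode is
    // active and turned off otherwise.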
13232
13233 // Saturation
13234 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13235 int32_t* use_saturation =
13236 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13238 rc = BAD_VALUE;
13239 }
13240 }
13241
Thierry Strudel3d639192016-09-09 11:52:26 -070013242 // EV step
13243 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13244 gCamCapability[mCameraId]->exp_compensation_step)) {
13245 rc = BAD_VALUE;
13246 }
13247
13248 // CDS info
13249 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13250 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13251 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13252
13253 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13254 CAM_INTF_META_CDS_DATA, *cdsData)) {
13255 rc = BAD_VALUE;
13256 }
13257 }
13258
Shuzhen Wang19463d72016-03-08 11:09:52 -080013259 // Hybrid AE
13260 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13261 uint8_t *hybrid_ae = (uint8_t *)
13262 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013263 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13264 rc = BAD_VALUE;
13265 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013266 }
13267
Shuzhen Wang14415f52016-11-16 18:26:18 -080013268 // Histogram
13269 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13270 uint8_t histogramMode =
13271 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13272 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13273 histogramMode)) {
13274 rc = BAD_VALUE;
13275 }
13276 }
13277
13278 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13279 int32_t histogramBins =
13280 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13281 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13282 histogramBins)) {
13283 rc = BAD_VALUE;
13284 }
13285 }
13286
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013287 // Tracking AF
13288 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13289 uint8_t trackingAfTrigger =
13290 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13291 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13292 trackingAfTrigger)) {
13293 rc = BAD_VALUE;
13294 }
13295 }
13296
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013297 // Makernote
13298 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13299 if (entry.count != 0) {
13300 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13301 cam_makernote_t makernote;
13302 makernote.length = entry.count;
13303 memcpy(makernote.data, entry.data.u8, makernote.length);
13304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13305 rc = BAD_VALUE;
13306 }
13307 } else {
13308            ALOGE("%s: Makernote length %zu is larger than %d", __FUNCTION__, entry.count,
13309 MAX_MAKERNOTE_LENGTH);
13310 rc = BAD_VALUE;
13311 }
13312 }
13313
Thierry Strudel3d639192016-09-09 11:52:26 -070013314 return rc;
13315}
13316
13317/*===========================================================================
13318 * FUNCTION : captureResultCb
13319 *
13320 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13321 *
13322 * PARAMETERS :
13323 * @metadata : metadata super buffer from mm-camera-interface; NULL for buffer-only results
13324 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
 * @frame_number : frame number of the result being reported
 * @isInputBuffer : true if the buffer reported is an input (reprocess) buffer
13325 * @userdata: userdata
13326 *
13327 * RETURN : NONE
13328 *==========================================================================*/
13329void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13330 camera3_stream_buffer_t *buffer,
13331 uint32_t frame_number, bool isInputBuffer, void *userdata)
13332{
13333 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13334 if (hw == NULL) {
13335 LOGE("Invalid hw %p", hw);
13336 return;
13337 }
13338
13339 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13340 return;
13341}
13342
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013343/*===========================================================================
13344 * FUNCTION : setBufferErrorStatus
13345 *
13346 * DESCRIPTION: Callback handler for channels to report any buffer errors
13347 *
13348 * PARAMETERS :
13349 * @ch : Channel on which buffer error is reported from
13350 * @frame_number : frame number on which buffer error is reported on
13351 * @buffer_status : buffer error status
13352 * @userdata: userdata
13353 *
13354 * RETURN : NONE
13355 *==========================================================================*/
13356void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13357 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13358{
13359 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13360 if (hw == NULL) {
13361 LOGE("Invalid hw %p", hw);
13362 return;
13363 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013364
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013365 hw->setBufferErrorStatus(ch, frame_number, err);
13366 return;
13367}
13368
13369void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13370 uint32_t frameNumber, camera3_buffer_status_t err)
13371{
13372 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13373 pthread_mutex_lock(&mMutex);
13374
13375 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13376 if (req.frame_number != frameNumber)
13377 continue;
13378 for (auto& k : req.mPendingBufferList) {
13379 if(k.stream->priv == ch) {
13380 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13381 }
13382 }
13383 }
13384
13385 pthread_mutex_unlock(&mMutex);
13386 return;
13387}
Thierry Strudel3d639192016-09-09 11:52:26 -070013388/*===========================================================================
13389 * FUNCTION : initialize
13390 *
13391 * DESCRIPTION: Pass framework callback pointers to HAL
13392 *
13393 * PARAMETERS :
13394 *
13395 *
13396 * RETURN : Success : 0
13397 * Failure: -ENODEV
13398 *==========================================================================*/
13399
13400int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13401 const camera3_callback_ops_t *callback_ops)
13402{
13403 LOGD("E");
13404 QCamera3HardwareInterface *hw =
13405 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13406 if (!hw) {
13407 LOGE("NULL camera device");
13408 return -ENODEV;
13409 }
13410
13411 int rc = hw->initialize(callback_ops);
13412 LOGD("X");
13413 return rc;
13414}
13415
13416/*===========================================================================
13417 * FUNCTION : configure_streams
13418 *
13419 * DESCRIPTION: Static wrapper that forwards the stream configuration to the HAL instance (configureStreams)
13420 *
13421 * PARAMETERS :
13422 *
13423 *
13424 * RETURN : Success: 0
13425 * Failure: -EINVAL (if stream configuration is invalid)
13426 * -ENODEV (fatal error)
13427 *==========================================================================*/
13428
13429int QCamera3HardwareInterface::configure_streams(
13430 const struct camera3_device *device,
13431 camera3_stream_configuration_t *stream_list)
13432{
13433 LOGD("E");
13434 QCamera3HardwareInterface *hw =
13435 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13436 if (!hw) {
13437 LOGE("NULL camera device");
13438 return -ENODEV;
13439 }
13440 int rc = hw->configureStreams(stream_list);
13441 LOGD("X");
13442 return rc;
13443}
13444
13445/*===========================================================================
13446 * FUNCTION : construct_default_request_settings
13447 *
13448 * DESCRIPTION: Configure a settings buffer to meet the required use case
13449 *
13450 * PARAMETERS :
13451 *
13452 *
13453 * RETURN : Success: Return valid metadata
13454 * Failure: Return NULL
13455 *==========================================================================*/
13456const camera_metadata_t* QCamera3HardwareInterface::
13457 construct_default_request_settings(const struct camera3_device *device,
13458 int type)
13459{
13460
13461 LOGD("E");
13462 camera_metadata_t* fwk_metadata = NULL;
13463 QCamera3HardwareInterface *hw =
13464 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13465 if (!hw) {
13466 LOGE("NULL camera device");
13467 return NULL;
13468 }
13469
13470 fwk_metadata = hw->translateCapabilityToMetadata(type);
13471
13472 LOGD("X");
13473 return fwk_metadata;
13474}
13475
13476/*===========================================================================
13477 * FUNCTION : process_capture_request
13478 *
13479 * DESCRIPTION: Static wrapper that forwards the capture request to the HAL instance (orchestrateRequest)
13480 *
13481 * PARAMETERS :
13482 *
13483 *
13484 * RETURN :
13485 *==========================================================================*/
13486int QCamera3HardwareInterface::process_capture_request(
13487 const struct camera3_device *device,
13488 camera3_capture_request_t *request)
13489{
13490 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013491 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013492 QCamera3HardwareInterface *hw =
13493 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13494 if (!hw) {
13495 LOGE("NULL camera device");
13496 return -EINVAL;
13497 }
13498
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013499 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013500 LOGD("X");
13501 return rc;
13502}
13503
13504/*===========================================================================
13505 * FUNCTION : dump
13506 *
13507 * DESCRIPTION: Static wrapper that dumps HAL state for the camera device to the given fd
13508 *
13509 * PARAMETERS :
13510 *
13511 *
13512 * RETURN :
13513 *==========================================================================*/
13514
13515void QCamera3HardwareInterface::dump(
13516 const struct camera3_device *device, int fd)
13517{
13518 /* Log level property is read when "adb shell dumpsys media.camera" is
13519 called so that the log level can be controlled without restarting
13520 the media server */
13521 getLogLevel();
13522
13523 LOGD("E");
13524 QCamera3HardwareInterface *hw =
13525 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13526 if (!hw) {
13527 LOGE("NULL camera device");
13528 return;
13529 }
13530
13531 hw->dump(fd);
13532 LOGD("X");
13533 return;
13534}
13535
13536/*===========================================================================
13537 * FUNCTION : flush
13538 *
13539 * DESCRIPTION: Static wrapper that flushes all in-flight requests on the HAL instance
13540 *
13541 * PARAMETERS :
13542 *
13543 *
13544 * RETURN :
13545 *==========================================================================*/
13546
13547int QCamera3HardwareInterface::flush(
13548 const struct camera3_device *device)
13549{
13550 int rc;
13551 LOGD("E");
13552 QCamera3HardwareInterface *hw =
13553 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13554 if (!hw) {
13555 LOGE("NULL camera device");
13556 return -EINVAL;
13557 }
13558
13559 pthread_mutex_lock(&hw->mMutex);
13560 // Validate current state
13561 switch (hw->mState) {
13562 case STARTED:
13563 /* valid state */
13564 break;
13565
13566 case ERROR:
13567 pthread_mutex_unlock(&hw->mMutex);
13568 hw->handleCameraDeviceError();
13569 return -ENODEV;
13570
13571 default:
13572 LOGI("Flush returned during state %d", hw->mState);
13573 pthread_mutex_unlock(&hw->mMutex);
13574 return 0;
13575 }
13576 pthread_mutex_unlock(&hw->mMutex);
13577
13578 rc = hw->flush(true /* restart channels */ );
13579 LOGD("X");
13580 return rc;
13581}
13582
13583/*===========================================================================
13584 * FUNCTION : close_camera_device
13585 *
13586 * DESCRIPTION: Closes the camera device and deletes the HAL instance
13587 *
13588 * PARAMETERS :
13589 *
13590 *
13591 * RETURN :
13592 *==========================================================================*/
13593int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13594{
13595 int ret = NO_ERROR;
13596 QCamera3HardwareInterface *hw =
13597 reinterpret_cast<QCamera3HardwareInterface *>(
13598 reinterpret_cast<camera3_device_t *>(device)->priv);
13599 if (!hw) {
13600 LOGE("NULL camera device");
13601 return BAD_VALUE;
13602 }
13603
13604 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13605 delete hw;
13606 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013607 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013608 return ret;
13609}
13610
13611/*===========================================================================
13612 * FUNCTION : getWaveletDenoiseProcessPlate
13613 *
13614 * DESCRIPTION: query wavelet denoise process plate
13615 *
13616 * PARAMETERS : None
13617 *
13618 * RETURN : WNR process plate value
13619 *==========================================================================*/
13620cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13621{
13622 char prop[PROPERTY_VALUE_MAX];
13623 memset(prop, 0, sizeof(prop));
13624 property_get("persist.denoise.process.plates", prop, "0");
13625 int processPlate = atoi(prop);
13626 switch(processPlate) {
13627 case 0:
13628 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13629 case 1:
13630 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13631 case 2:
13632 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13633 case 3:
13634 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13635 default:
13636 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13637 }
13638}
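// Added note (illustrative, not from the original source): the plate selection can be changed at
// runtime for experimentation, e.g.
//     adb shell setprop persist.denoise.process.plates 2
// selects CAM_WAVELET_DENOISE_STREAMLINE_YCBCR on the next query; unrecognized values fall back
// to the same streamlined Y/CbCr plate.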
13639
13640
13641/*===========================================================================
13642 * FUNCTION : getTemporalDenoiseProcessPlate
13643 *
13644 * DESCRIPTION: query temporal denoise process plate
13645 *
13646 * PARAMETERS : None
13647 *
13648 * RETURN : TNR process plate value
13649 *==========================================================================*/
13650cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13651{
13652 char prop[PROPERTY_VALUE_MAX];
13653 memset(prop, 0, sizeof(prop));
13654 property_get("persist.tnr.process.plates", prop, "0");
13655 int processPlate = atoi(prop);
13656 switch(processPlate) {
13657 case 0:
13658 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13659 case 1:
13660 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13661 case 2:
13662 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13663 case 3:
13664 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13665 default:
13666 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13667 }
13668}
13669
13670
13671/*===========================================================================
13672 * FUNCTION : extractSceneMode
13673 *
13674 * DESCRIPTION: Extract scene mode from frameworks set metadata
13675 *
13676 * PARAMETERS :
13677 * @frame_settings: CameraMetadata reference
13678 * @metaMode: ANDROID_CONTROL_MODE
13679 * @hal_metadata: hal metadata structure
13680 *
13681 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on failure)
13682 *==========================================================================*/
13683int32_t QCamera3HardwareInterface::extractSceneMode(
13684 const CameraMetadata &frame_settings, uint8_t metaMode,
13685 metadata_buffer_t *hal_metadata)
13686{
13687 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013688 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13689
13690 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13691 LOGD("Ignoring control mode OFF_KEEP_STATE");
13692 return NO_ERROR;
13693 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013694
13695 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13696 camera_metadata_ro_entry entry =
13697 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13698 if (0 == entry.count)
13699 return rc;
13700
13701 uint8_t fwk_sceneMode = entry.data.u8[0];
13702
13703 int val = lookupHalName(SCENE_MODES_MAP,
13704 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13705 fwk_sceneMode);
13706 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013707 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013708 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013709 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013710 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013711
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013712 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13713 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13714 }
13715
13716 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13717        if (sceneMode == CAM_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013718 cam_hdr_param_t hdr_params;
13719 hdr_params.hdr_enable = 1;
13720 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13721 hdr_params.hdr_need_1x = false;
13722 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13723 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13724 rc = BAD_VALUE;
13725 }
13726 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013727
Thierry Strudel3d639192016-09-09 11:52:26 -070013728 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13729 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13730 rc = BAD_VALUE;
13731 }
13732 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013733
13734 if (mForceHdrSnapshot) {
13735 cam_hdr_param_t hdr_params;
13736 hdr_params.hdr_enable = 1;
13737 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13738 hdr_params.hdr_need_1x = false;
13739 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13740 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13741 rc = BAD_VALUE;
13742 }
13743 }
13744
Thierry Strudel3d639192016-09-09 11:52:26 -070013745 return rc;
13746}
13747
13748/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013749 * FUNCTION : setVideoHdrMode
13750 *
13751 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13752 *
13753 * PARAMETERS :
13754 * @hal_metadata: hal metadata structure
13755 * @vhdr : video HDR mode requested via QCAMERA3_VIDEO_HDR_MODE
13756 *
13757 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on invalid mode)
13758 *==========================================================================*/
13759int32_t QCamera3HardwareInterface::setVideoHdrMode(
13760 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13761{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013762 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13763 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13764 }
13765
13766 LOGE("Invalid Video HDR mode %d!", vhdr);
13767 return BAD_VALUE;
13768}
13769
13770/*===========================================================================
13771 * FUNCTION : setSensorHDR
13772 *
13773 * DESCRIPTION: Enable/disable sensor HDR.
13774 *
13775 * PARAMETERS :
13776 * @hal_metadata: hal metadata structure
13777 * @enable: boolean whether to enable/disable sensor HDR
 * @isVideoHdrEnable: true when invoked for video HDR (QCAMERA3_VIDEO_HDR_MODE),
 *                    false when invoked for scene-mode HDR
13778 *
13779 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on failure)
13780 *==========================================================================*/
13781int32_t QCamera3HardwareInterface::setSensorHDR(
13782 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13783{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013784 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013785 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13786
13787 if (enable) {
13788 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13789 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13790 #ifdef _LE_CAMERA_
13791 //Default to staggered HDR for IOT
13792 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13793 #else
13794 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13795 #endif
13796 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13797 }
13798
13799 bool isSupported = false;
13800 switch (sensor_hdr) {
13801 case CAM_SENSOR_HDR_IN_SENSOR:
13802 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13803 CAM_QCOM_FEATURE_SENSOR_HDR) {
13804 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013805 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013806 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013807 break;
13808 case CAM_SENSOR_HDR_ZIGZAG:
13809 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13810 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13811 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013812 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013813 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013814 break;
13815 case CAM_SENSOR_HDR_STAGGERED:
13816 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13817 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13818 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013819 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013820 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013821 break;
13822 case CAM_SENSOR_HDR_OFF:
13823 isSupported = true;
13824 LOGD("Turning off sensor HDR");
13825 break;
13826 default:
13827 LOGE("HDR mode %d not supported", sensor_hdr);
13828 rc = BAD_VALUE;
13829 break;
13830 }
13831
13832 if(isSupported) {
13833 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13834 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13835 rc = BAD_VALUE;
13836 } else {
13837 if(!isVideoHdrEnable)
13838 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013839 }
13840 }
13841 return rc;
13842}
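// Added note (sketch, not from the original source): the sensor HDR flavor requested when HDR is
// enabled comes from persist.camera.sensor.hdr, e.g.
//     adb shell setprop persist.camera.sensor.hdr 3
// Assuming the cam_sensor_hdr_type_t ordering implied by the "staggered HDR for IOT" default
// above (3 == CAM_SENSOR_HDR_STAGGERED), this asks for staggered HDR, which is applied only if
// the capability mask advertises CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR.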
13843
13844/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013845 * FUNCTION : needRotationReprocess
13846 *
13847 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13848 *
13849 * PARAMETERS : none
13850 *
13851 * RETURN : true: needed
13852 * false: no need
13853 *==========================================================================*/
13854bool QCamera3HardwareInterface::needRotationReprocess()
13855{
13856 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13857        // pp has the capability to process rotation; use reprocess to apply the requested rotation
13858 LOGH("need do reprocess for rotation");
13859 return true;
13860 }
13861
13862 return false;
13863}
13864
13865/*===========================================================================
13866 * FUNCTION : needReprocess
13867 *
13868 * DESCRIPTION: if reprocess is needed
13869 *
13870 * PARAMETERS : none
13871 *
13872 * RETURN : true: needed
13873 * false: no need
13874 *==========================================================================*/
13875bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13876{
13877 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13878 // TODO: add for ZSL HDR later
13879 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13880 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13881 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13882 return true;
13883 } else {
13884 LOGH("already post processed frame");
13885 return false;
13886 }
13887 }
13888 return needRotationReprocess();
13889}
13890
13891/*===========================================================================
13892 * FUNCTION : needJpegExifRotation
13893 *
13894 * DESCRIPTION: if rotation from jpeg is needed
13895 *
13896 * PARAMETERS : none
13897 *
13898 * RETURN : true: needed
13899 * false: no need
13900 *==========================================================================*/
13901bool QCamera3HardwareInterface::needJpegExifRotation()
13902{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013903 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013904 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13905 LOGD("Need use Jpeg EXIF Rotation");
13906 return true;
13907 }
13908 return false;
13909}
13910
13911/*===========================================================================
13912 * FUNCTION : addOfflineReprocChannel
13913 *
13914 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13915 * coming from input channel
13916 *
13917 * PARAMETERS :
13918 * @config : reprocess configuration
13919 * @inputChHandle : pointer to the input (source) channel
13920 *
13921 *
13922 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13923 *==========================================================================*/
13924QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13925 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13926{
13927 int32_t rc = NO_ERROR;
13928 QCamera3ReprocessChannel *pChannel = NULL;
13929
13930 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013931 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13932 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013933 if (NULL == pChannel) {
13934 LOGE("no mem for reprocess channel");
13935 return NULL;
13936 }
13937
13938 rc = pChannel->initialize(IS_TYPE_NONE);
13939 if (rc != NO_ERROR) {
13940 LOGE("init reprocess channel failed, ret = %d", rc);
13941 delete pChannel;
13942 return NULL;
13943 }
13944
13945 // pp feature config
13946 cam_pp_feature_config_t pp_config;
13947 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13948
13949 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13950 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13951 & CAM_QCOM_FEATURE_DSDN) {
13952        // Use CPP CDS in case h/w supports it.
13953 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13954 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13955 }
13956 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13957 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13958 }
13959
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013960 if (config.hdr_param.hdr_enable) {
13961 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13962 pp_config.hdr_param = config.hdr_param;
13963 }
13964
13965 if (mForceHdrSnapshot) {
13966 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13967 pp_config.hdr_param.hdr_enable = 1;
13968 pp_config.hdr_param.hdr_need_1x = 0;
13969 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13970 }
13971
Thierry Strudel3d639192016-09-09 11:52:26 -070013972 rc = pChannel->addReprocStreamsFromSource(pp_config,
13973 config,
13974 IS_TYPE_NONE,
13975 mMetadataChannel);
13976
13977 if (rc != NO_ERROR) {
13978 delete pChannel;
13979 return NULL;
13980 }
13981 return pChannel;
13982}
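// Added note (not from the original source): the offline reprocess channel starts from the
// CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 mask, prefers CPP DSDN over CDS when the hardware supports
// it, drops CAM_QCOM_FEATURE_ROTATION when CPP cannot rotate, and adds CAM_QCOM_FEATURE_HDR when
// HDR bracketing is requested (or forced via mForceHdrSnapshot).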
13983
13984/*===========================================================================
13985 * FUNCTION : getMobicatMask
13986 *
13987 * DESCRIPTION: returns mobicat mask
13988 *
13989 * PARAMETERS : none
13990 *
13991 * RETURN : mobicat mask
13992 *
13993 *==========================================================================*/
13994uint8_t QCamera3HardwareInterface::getMobicatMask()
13995{
13996 return m_MobicatMask;
13997}
13998
13999/*===========================================================================
14000 * FUNCTION : setMobicat
14001 *
14002 * DESCRIPTION: set Mobicat on/off.
14003 *
14004 * PARAMETERS :
14005 * @params : none
14006 *
14007 * RETURN : int32_t type of status
14008 * NO_ERROR -- success
14009 *              non-zero failure code
14010 *==========================================================================*/
14011int32_t QCamera3HardwareInterface::setMobicat()
14012{
Thierry Strudel3d639192016-09-09 11:52:26 -070014013 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014014
Shuzhen Wangb57ec912017-07-31 13:24:27 -070014015 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070014016 tune_cmd_t tune_cmd;
14017 tune_cmd.type = SET_RELOAD_CHROMATIX;
14018 tune_cmd.module = MODULE_ALL;
14019 tune_cmd.value = TRUE;
14020 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14021 CAM_INTF_PARM_SET_VFE_COMMAND,
14022 tune_cmd);
14023
14024 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14025 CAM_INTF_PARM_SET_PP_COMMAND,
14026 tune_cmd);
14027 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014028
14029 return ret;
14030}
14031
14032/*===========================================================================
14033* FUNCTION : getLogLevel
14034*
14035* DESCRIPTION: Reads the log level property into a variable
14036*
14037* PARAMETERS :
14038* None
14039*
14040* RETURN :
14041* None
14042*==========================================================================*/
14043void QCamera3HardwareInterface::getLogLevel()
14044{
14045 char prop[PROPERTY_VALUE_MAX];
14046 uint32_t globalLogLevel = 0;
14047
14048 property_get("persist.camera.hal.debug", prop, "0");
14049 int val = atoi(prop);
14050 if (0 <= val) {
14051 gCamHal3LogLevel = (uint32_t)val;
14052 }
14053
Thierry Strudel9ec39c62016-12-28 11:30:05 -080014054 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070014055 gKpiDebugLevel = atoi(prop);
14056
14057 property_get("persist.camera.global.debug", prop, "0");
14058 val = atoi(prop);
14059 if (0 <= val) {
14060 globalLogLevel = (uint32_t)val;
14061 }
14062
14063 /* Highest log level among hal.logs and global.logs is selected */
14064 if (gCamHal3LogLevel < globalLogLevel)
14065 gCamHal3LogLevel = globalLogLevel;
14066
14067 return;
14068}
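// Added note (illustrative, not from the original source): log verbosity can be raised without
// restarting mediaserver, e.g.
//     adb shell setprop persist.camera.hal.debug 4
//     adb shell setprop persist.camera.global.debug 2
//     adb shell dumpsys media.camera    # dump() re-reads the properties via getLogLevel()
// The effective level is the higher of the HAL-specific and global settings.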
14069
14070/*===========================================================================
14071 * FUNCTION : validateStreamRotations
14072 *
14073 * DESCRIPTION: Check if the rotations requested are supported
14074 *
14075 * PARAMETERS :
14076 * @stream_list : streams to be configured
14077 *
14078 * RETURN : NO_ERROR on success
14079 * -EINVAL on failure
14080 *
14081 *==========================================================================*/
14082int QCamera3HardwareInterface::validateStreamRotations(
14083 camera3_stream_configuration_t *streamList)
14084{
14085 int rc = NO_ERROR;
14086
14087 /*
14088 * Loop through all streams requested in configuration
14089 * Check if unsupported rotations have been requested on any of them
14090 */
14091 for (size_t j = 0; j < streamList->num_streams; j++){
14092 camera3_stream_t *newStream = streamList->streams[j];
14093
Emilian Peev35ceeed2017-06-29 11:58:56 -070014094 switch(newStream->rotation) {
14095 case CAMERA3_STREAM_ROTATION_0:
14096 case CAMERA3_STREAM_ROTATION_90:
14097 case CAMERA3_STREAM_ROTATION_180:
14098 case CAMERA3_STREAM_ROTATION_270:
14099 //Expected values
14100 break;
14101 default:
14102 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
14103 "type:%d and stream format:%d", __func__,
14104 newStream->rotation, newStream->stream_type,
14105 newStream->format);
14106 return -EINVAL;
14107 }
14108
Thierry Strudel3d639192016-09-09 11:52:26 -070014109 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14110 bool isImplDef = (newStream->format ==
14111 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14112 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14113 isImplDef);
14114
14115 if (isRotated && (!isImplDef || isZsl)) {
14116 LOGE("Error: Unsupported rotation of %d requested for stream"
14117 "type:%d and stream format:%d",
14118 newStream->rotation, newStream->stream_type,
14119 newStream->format);
14120 rc = -EINVAL;
14121 break;
14122 }
14123 }
14124
14125 return rc;
14126}
14127
14128/*===========================================================================
14129* FUNCTION : getFlashInfo
14130*
14131* DESCRIPTION: Retrieve information about whether the device has a flash.
14132*
14133* PARAMETERS :
14134* @cameraId : Camera id to query
14135* @hasFlash : Boolean indicating whether there is a flash device
14136* associated with given camera
14137* @flashNode : If a flash device exists, this will be its device node.
14138*
14139* RETURN :
14140* None
14141*==========================================================================*/
14142void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14143 bool& hasFlash,
14144 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14145{
14146 cam_capability_t* camCapability = gCamCapability[cameraId];
14147 if (NULL == camCapability) {
14148 hasFlash = false;
14149 flashNode[0] = '\0';
14150 } else {
14151 hasFlash = camCapability->flash_available;
14152 strlcpy(flashNode,
14153 (char*)camCapability->flash_dev_name,
14154 QCAMERA_MAX_FILEPATH_LENGTH);
14155 }
14156}
14157
14158/*===========================================================================
14159* FUNCTION : getEepromVersionInfo
14160*
14161* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14162*
14163* PARAMETERS : None
14164*
14165* RETURN : string describing EEPROM version
14166* "\0" if no such info available
14167*==========================================================================*/
14168const char *QCamera3HardwareInterface::getEepromVersionInfo()
14169{
14170 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14171}
14172
14173/*===========================================================================
14174* FUNCTION : getLdafCalib
14175*
14176* DESCRIPTION: Retrieve Laser AF calibration data
14177*
14178* PARAMETERS : None
14179*
14180* RETURN : Two uint32_t describing laser AF calibration data
14181* NULL if none is available.
14182*==========================================================================*/
14183const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14184{
14185 if (mLdafCalibExist) {
14186 return &mLdafCalib[0];
14187 } else {
14188 return NULL;
14189 }
14190}
14191
14192/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014193* FUNCTION : getEaselFwVersion
14194*
14195* DESCRIPTION: Retrieve Easel firmware version
14196*
14197* PARAMETERS : None
14198*
14199* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014200*              NULL if the firmware version has not been updated
Arnd Geis082a4d72017-08-24 10:33:07 -070014201*==========================================================================*/
14202const char *QCamera3HardwareInterface::getEaselFwVersion()
14203{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014204 if (mEaselFwUpdated) {
14205 return (const char *)&mEaselFwVersion[0];
14206 } else {
14207 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014208 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014209}
14210
14211/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014212 * FUNCTION : dynamicUpdateMetaStreamInfo
14213 *
14214 * DESCRIPTION: This function:
14215 * (1) stops all the channels
14216 *              (2) returns errors for pending requests and buffers
14217 *              (3) sends meta stream info via set_parms
14218 *              (4) starts all channels
14219 *              This is useful when the sensor has to be restarted to apply
14220 *              settings such as the frame rate of a different sensor mode
14221 *
14222 * PARAMETERS : None
14223 *
14224 * RETURN : NO_ERROR on success
14225 * Error codes on failure
14226 *
14227 *==========================================================================*/
14228int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14229{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014230 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014231 int rc = NO_ERROR;
14232
14233 LOGD("E");
14234
14235 rc = stopAllChannels();
14236 if (rc < 0) {
14237 LOGE("stopAllChannels failed");
14238 return rc;
14239 }
14240
14241 rc = notifyErrorForPendingRequests();
14242 if (rc < 0) {
14243 LOGE("notifyErrorForPendingRequests failed");
14244 return rc;
14245 }
14246
14247 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14248 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14249                ", Format:%d",
14250 mStreamConfigInfo.type[i],
14251 mStreamConfigInfo.stream_sizes[i].width,
14252 mStreamConfigInfo.stream_sizes[i].height,
14253 mStreamConfigInfo.postprocess_mask[i],
14254 mStreamConfigInfo.format[i]);
14255 }
14256
14257 /* Send meta stream info once again so that ISP can start */
14258 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14259 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14260 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14261 mParameters);
14262 if (rc < 0) {
14263        LOGE("Set meta stream info failed. Sensor mode will not change");
14264 }
14265
14266 rc = startAllChannels();
14267 if (rc < 0) {
14268 LOGE("startAllChannels failed");
14269 return rc;
14270 }
14271
14272 LOGD("X");
14273 return rc;
14274}
14275
14276/*===========================================================================
14277 * FUNCTION : stopAllChannels
14278 *
14279 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14280 *
14281 * PARAMETERS : None
14282 *
14283 * RETURN : NO_ERROR on success
14284 * Error codes on failure
14285 *
14286 *==========================================================================*/
14287int32_t QCamera3HardwareInterface::stopAllChannels()
14288{
14289 int32_t rc = NO_ERROR;
14290
14291 LOGD("Stopping all channels");
14292 // Stop the Streams/Channels
14293 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14294 it != mStreamInfo.end(); it++) {
14295 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14296 if (channel) {
14297 channel->stop();
14298 }
14299 (*it)->status = INVALID;
14300 }
14301
14302 if (mSupportChannel) {
14303 mSupportChannel->stop();
14304 }
14305 if (mAnalysisChannel) {
14306 mAnalysisChannel->stop();
14307 }
14308 if (mRawDumpChannel) {
14309 mRawDumpChannel->stop();
14310 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014311 if (mHdrPlusRawSrcChannel) {
14312 mHdrPlusRawSrcChannel->stop();
14313 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014314 if (mMetadataChannel) {
14315        /* If mStreamInfo is not empty, there is a metadata stream */
14316 mMetadataChannel->stop();
14317 }
14318
14319 LOGD("All channels stopped");
14320 return rc;
14321}
14322
14323/*===========================================================================
14324 * FUNCTION : startAllChannels
14325 *
14326 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14327 *
14328 * PARAMETERS : None
14329 *
14330 * RETURN : NO_ERROR on success
14331 * Error codes on failure
14332 *
14333 *==========================================================================*/
14334int32_t QCamera3HardwareInterface::startAllChannels()
14335{
14336 int32_t rc = NO_ERROR;
14337
14338 LOGD("Start all channels ");
14339 // Start the Streams/Channels
14340 if (mMetadataChannel) {
14341        /* If mStreamInfo is not empty, there is a metadata stream */
14342 rc = mMetadataChannel->start();
14343 if (rc < 0) {
14344 LOGE("META channel start failed");
14345 return rc;
14346 }
14347 }
14348 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14349 it != mStreamInfo.end(); it++) {
14350 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14351 if (channel) {
14352 rc = channel->start();
14353 if (rc < 0) {
14354 LOGE("channel start failed");
14355 return rc;
14356 }
14357 }
14358 }
14359 if (mAnalysisChannel) {
14360 mAnalysisChannel->start();
14361 }
14362 if (mSupportChannel) {
14363 rc = mSupportChannel->start();
14364 if (rc < 0) {
14365 LOGE("Support channel start failed");
14366 return rc;
14367 }
14368 }
14369 if (mRawDumpChannel) {
14370 rc = mRawDumpChannel->start();
14371 if (rc < 0) {
14372 LOGE("RAW dump channel start failed");
14373 return rc;
14374 }
14375 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014376 if (mHdrPlusRawSrcChannel) {
14377 rc = mHdrPlusRawSrcChannel->start();
14378 if (rc < 0) {
14379 LOGE("HDR+ RAW channel start failed");
14380 return rc;
14381 }
14382 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014383
14384 LOGD("All channels started");
14385 return rc;
14386}
14387
14388/*===========================================================================
14389 * FUNCTION : notifyErrorForPendingRequests
14390 *
14391 * DESCRIPTION: This function sends errors for all pending requests/buffers
14392 *
14393 * PARAMETERS : None
14394 *
14395 * RETURN : Error codes
14396 * NO_ERROR on success
14397 *
14398 *==========================================================================*/
14399int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14400{
Emilian Peev7650c122017-01-19 08:24:33 -080014401 notifyErrorFoPendingDepthData(mDepthChannel);
14402
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014403 auto pendingRequest = mPendingRequestsList.begin();
14404 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014405
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014406 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14407 // buffers (for which buffers aren't sent yet).
14408 while (pendingRequest != mPendingRequestsList.end() ||
14409 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14410 if (pendingRequest == mPendingRequestsList.end() ||
14411 pendingBuffer->frame_number < pendingRequest->frame_number) {
14412            // If result metadata for this frame was already sent, notify about a buffer error
14413            // and return the buffers with an error status.
14414 for (auto &info : pendingBuffer->mPendingBufferList) {
14415 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014416 camera3_notify_msg_t notify_msg;
14417 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14418 notify_msg.type = CAMERA3_MSG_ERROR;
14419 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014420 notify_msg.message.error.error_stream = info.stream;
14421 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014422 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014423
14424 camera3_stream_buffer_t buffer = {};
14425 buffer.acquire_fence = -1;
14426 buffer.release_fence = -1;
14427 buffer.buffer = info.buffer;
14428 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14429 buffer.stream = info.stream;
14430 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014431 }
14432
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014433 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14434 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14435 pendingBuffer->frame_number > pendingRequest->frame_number) {
14436 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014437 camera3_notify_msg_t notify_msg;
14438 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14439 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014440 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14441 notify_msg.message.error.error_stream = nullptr;
14442 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014443 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014444
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014445 if (pendingRequest->input_buffer != nullptr) {
14446 camera3_capture_result result = {};
14447 result.frame_number = pendingRequest->frame_number;
14448 result.result = nullptr;
14449 result.input_buffer = pendingRequest->input_buffer;
14450 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014451 }
14452
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014453 mShutterDispatcher.clear(pendingRequest->frame_number);
14454 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14455 } else {
14456 // If both buffers and result metadata weren't sent yet, notify about a request error
14457 // and return buffers with error.
14458 for (auto &info : pendingBuffer->mPendingBufferList) {
14459 camera3_notify_msg_t notify_msg;
14460 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14461 notify_msg.type = CAMERA3_MSG_ERROR;
14462 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14463 notify_msg.message.error.error_stream = info.stream;
14464 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14465 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014466
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014467 camera3_stream_buffer_t buffer = {};
14468 buffer.acquire_fence = -1;
14469 buffer.release_fence = -1;
14470 buffer.buffer = info.buffer;
14471 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14472 buffer.stream = info.stream;
14473 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14474 }
14475
14476 if (pendingRequest->input_buffer != nullptr) {
14477 camera3_capture_result result = {};
14478 result.frame_number = pendingRequest->frame_number;
14479 result.result = nullptr;
14480 result.input_buffer = pendingRequest->input_buffer;
14481 orchestrateResult(&result);
14482 }
14483
14484 mShutterDispatcher.clear(pendingRequest->frame_number);
14485 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14486 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014487 }
14488 }
14489
14490 /* Reset pending frame Drop list and requests list */
14491 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014492 mShutterDispatcher.clear();
14493 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014494 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014495 mExpectedFrameDuration = 0;
14496 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014497 LOGH("Cleared all the pending buffers ");
14498
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014499 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014500}
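/*
 * Summary of the three cases handled above (added for clarity, with hypothetical
 * frame numbers):
 *   - A frame has pending buffers but no pending request entry (its result
 *     metadata was already delivered): each buffer gets CAMERA3_MSG_ERROR_BUFFER
 *     and is returned with CAMERA3_BUFFER_STATUS_ERROR.
 *   - A frame has a pending request entry but no pending buffers (its buffers
 *     were already delivered): a single CAMERA3_MSG_ERROR_RESULT is sent and any
 *     input buffer is returned.
 *   - A frame still has both: CAMERA3_MSG_ERROR_REQUEST is sent for each pending
 *     buffer, the buffers are returned with error status, and the shutter/result
 *     bookkeeping for the frame is cleared.
 */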
14501
14502bool QCamera3HardwareInterface::isOnEncoder(
14503 const cam_dimension_t max_viewfinder_size,
14504 uint32_t width, uint32_t height)
14505{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014506 return ((width > (uint32_t)max_viewfinder_size.width) ||
14507 (height > (uint32_t)max_viewfinder_size.height) ||
14508 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14509 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014510}
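/*
 * Worked example (added for clarity; numbers are hypothetical): with a
 * max_viewfinder_size of 1920x1080, a 1280x720 stream returns false (it fits the
 * viewfinder bound and is below 4K), while a 4032x3024 snapshot returns true and
 * is therefore routed through the encoder path.
 */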
14511
14512/*===========================================================================
14513 * FUNCTION : setBundleInfo
14514 *
14515 * DESCRIPTION: Set bundle info for all streams that are bundled.
14516 *
14517 * PARAMETERS : None
14518 *
14519 * RETURN : NO_ERROR on success
14520 * Error codes on failure
14521 *==========================================================================*/
14522int32_t QCamera3HardwareInterface::setBundleInfo()
14523{
14524 int32_t rc = NO_ERROR;
14525
14526 if (mChannelHandle) {
14527 cam_bundle_config_t bundleInfo;
14528 memset(&bundleInfo, 0, sizeof(bundleInfo));
14529 rc = mCameraHandle->ops->get_bundle_info(
14530 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14531 if (rc != NO_ERROR) {
14532 LOGE("get_bundle_info failed");
14533 return rc;
14534 }
14535 if (mAnalysisChannel) {
14536 mAnalysisChannel->setBundleInfo(bundleInfo);
14537 }
14538 if (mSupportChannel) {
14539 mSupportChannel->setBundleInfo(bundleInfo);
14540 }
14541 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14542 it != mStreamInfo.end(); it++) {
14543 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14544 channel->setBundleInfo(bundleInfo);
14545 }
14546 if (mRawDumpChannel) {
14547 mRawDumpChannel->setBundleInfo(bundleInfo);
14548 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014549 if (mHdrPlusRawSrcChannel) {
14550 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14551 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014552 }
14553
14554 return rc;
14555}
14556
14557/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014558 * FUNCTION : setInstantAEC
14559 *
14560 * DESCRIPTION: Set Instant AEC related params.
14561 *
14562 * PARAMETERS :
14563 * @meta: CameraMetadata reference
14564 *
14565 * RETURN : NO_ERROR on success
14566 * Error codes on failure
14567 *==========================================================================*/
14568int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14569{
14570 int32_t rc = NO_ERROR;
14571 uint8_t val = 0;
14572 char prop[PROPERTY_VALUE_MAX];
14573
14574 // First try to configure instant AEC from framework metadata
14575 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14576 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14577 }
14578
14579 // If framework did not set this value, try to read from set prop.
14580 if (val == 0) {
14581 memset(prop, 0, sizeof(prop));
14582 property_get("persist.camera.instant.aec", prop, "0");
14583 val = (uint8_t)atoi(prop);
14584 }
14585
14586 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14587 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14588 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14589 mInstantAEC = val;
14590 mInstantAECSettledFrameNumber = 0;
14591 mInstantAecFrameIdxCount = 0;
14592 LOGH("instantAEC value set %d",val);
14593 if (mInstantAEC) {
14594 memset(prop, 0, sizeof(prop));
14595 property_get("persist.camera.ae.instant.bound", prop, "10");
14596 int32_t aec_frame_skip_cnt = atoi(prop);
14597 if (aec_frame_skip_cnt >= 0) {
14598 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14599 } else {
14600 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14601 rc = BAD_VALUE;
14602 }
14603 }
14604 } else {
14605 LOGE("Bad instant aec value set %d", val);
14606 rc = BAD_VALUE;
14607 }
14608 return rc;
14609}
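/*
 * Usage note (added for clarity): instant AEC can be requested either through the
 * QCAMERA3_INSTANT_AEC_MODE vendor tag in the capture request, or for bring-up via
 * the property read above, e.g.:
 *
 *   adb shell setprop persist.camera.instant.aec 1
 *
 * assuming 1 corresponds to CAM_AEC_NORMAL_CONVERGENCE in the enum (any value in
 * [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX) is accepted).
 */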
14610
14611/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014612 * FUNCTION : get_num_overall_buffers
14613 *
14614 * DESCRIPTION: Estimate number of pending buffers across all requests.
14615 *
14616 * PARAMETERS : None
14617 *
14618 * RETURN : Number of overall pending buffers
14619 *
14620 *==========================================================================*/
14621uint32_t PendingBuffersMap::get_num_overall_buffers()
14622{
14623 uint32_t sum_buffers = 0;
14624 for (auto &req : mPendingBuffersInRequest) {
14625 sum_buffers += req.mPendingBufferList.size();
14626 }
14627 return sum_buffers;
14628}
14629
14630/*===========================================================================
14631 * FUNCTION : removeBuf
14632 *
14633 * DESCRIPTION: Remove a matching buffer from tracker.
14634 *
14635 * PARAMETERS : @buffer: image buffer for the callback
14636 *
14637 * RETURN : None
14638 *
14639 *==========================================================================*/
14640void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14641{
14642 bool buffer_found = false;
14643 for (auto req = mPendingBuffersInRequest.begin();
14644 req != mPendingBuffersInRequest.end(); req++) {
14645 for (auto k = req->mPendingBufferList.begin();
14646 k != req->mPendingBufferList.end(); k++ ) {
14647 if (k->buffer == buffer) {
14648 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14649 req->frame_number, buffer);
14650 k = req->mPendingBufferList.erase(k);
14651 if (req->mPendingBufferList.empty()) {
14652 // Remove this request from Map
14653 req = mPendingBuffersInRequest.erase(req);
14654 }
14655 buffer_found = true;
14656 break;
14657 }
14658 }
14659 if (buffer_found) {
14660 break;
14661 }
14662 }
14663 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14664 get_num_overall_buffers());
14665}
14666
14667/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014668 * FUNCTION : getBufErrStatus
14669 *
14670 * DESCRIPTION: get buffer error status
14671 *
14672 * PARAMETERS : @buffer: buffer handle
14673 *
14674 * RETURN : Error status
14675 *
14676 *==========================================================================*/
14677int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14678{
14679 for (auto& req : mPendingBuffersInRequest) {
14680 for (auto& k : req.mPendingBufferList) {
14681 if (k.buffer == buffer)
14682 return k.bufStatus;
14683 }
14684 }
14685 return CAMERA3_BUFFER_STATUS_OK;
14686}
14687
14688/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014689 * FUNCTION : setPAAFSupport
14690 *
14691 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14692 * feature mask according to stream type and filter
14693 * arrangement
14694 *
14695 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14696 * @stream_type: stream type
14697 * @filter_arrangement: filter arrangement
14698 *
14699 * RETURN : None
14700 *==========================================================================*/
14701void QCamera3HardwareInterface::setPAAFSupport(
14702 cam_feature_mask_t& feature_mask,
14703 cam_stream_type_t stream_type,
14704 cam_color_filter_arrangement_t filter_arrangement)
14705{
Thierry Strudel3d639192016-09-09 11:52:26 -070014706 switch (filter_arrangement) {
14707 case CAM_FILTER_ARRANGEMENT_RGGB:
14708 case CAM_FILTER_ARRANGEMENT_GRBG:
14709 case CAM_FILTER_ARRANGEMENT_GBRG:
14710 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014711 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14712 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014713 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014714 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14715 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014716 }
14717 break;
14718 case CAM_FILTER_ARRANGEMENT_Y:
14719 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14720 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14721 }
14722 break;
14723 default:
14724 break;
14725 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014726 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14727 feature_mask, stream_type, filter_arrangement);
14728
14729
Thierry Strudel3d639192016-09-09 11:52:26 -070014730}
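/*
 * Illustrative call (added for clarity; not from the original source): when
 * configuring a preview stream on a Bayer sensor, a caller would do roughly the
 * following, where the initial mask value is assumed for the sketch:
 *
 *   cam_feature_mask_t mask = 0;
 *   setPAAFSupport(mask, CAM_STREAM_TYPE_PREVIEW,
 *           gCamCapability[mCameraId]->color_arrangement);
 *   // For RGGB/GRBG/GBRG/BGGR sensors, mask now includes CAM_QCOM_FEATURE_PAAF
 *   // unless CAM_QTI_FEATURE_PPEISCORE was already set.
 */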
14731
14732/*===========================================================================
14733* FUNCTION : getSensorMountAngle
14734*
14735* DESCRIPTION: Retrieve sensor mount angle
14736*
14737* PARAMETERS : None
14738*
14739* RETURN : sensor mount angle in uint32_t
14740*==========================================================================*/
14741uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14742{
14743 return gCamCapability[mCameraId]->sensor_mount_angle;
14744}
14745
14746/*===========================================================================
14747* FUNCTION : getRelatedCalibrationData
14748*
14749* DESCRIPTION: Retrieve related system calibration data
14750*
14751* PARAMETERS : None
14752*
14753* RETURN : Pointer of related system calibration data
14754*==========================================================================*/
14755const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14756{
14757 return (const cam_related_system_calibration_data_t *)
14758 &(gCamCapability[mCameraId]->related_cam_calibration);
14759}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014760
14761/*===========================================================================
14762 * FUNCTION : is60HzZone
14763 *
14764 * DESCRIPTION: Whether the phone is in a zone with 60Hz mains electricity frequency
14765 *
14766 * PARAMETERS : None
14767 *
14768 * RETURN : True if in 60Hz zone, False otherwise
14769 *==========================================================================*/
14770bool QCamera3HardwareInterface::is60HzZone()
14771{
14772 time_t t = time(NULL);
14773 struct tm lt;
14774
14775 struct tm* r = localtime_r(&t, &lt);
14776
14777 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14778 return true;
14779 else
14780 return false;
14781}
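/*
 * Worked examples of the UTC-offset heuristic above (added for clarity):
 *   UTC-05:00 (e.g. New York): tm_gmtoff = -18000 <= -7200  -> true  (60Hz)
 *   UTC+01:00 (e.g. Berlin)  : tm_gmtoff =   3600           -> false (50Hz)
 *   UTC+09:00 (e.g. Tokyo)   : tm_gmtoff =  32400 >= 28800  -> true  (60Hz)
 * The check is intentionally coarse: offsets at or west of -2h and at or east of
 * +8h are treated as 60Hz territory, everything in between as 50Hz.
 */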
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014782
14783/*===========================================================================
14784 * FUNCTION : adjustBlackLevelForCFA
14785 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order of
14786 *              the sensor's Bayer CFA (Color Filter Array).
14787 * of bayer CFA (Color Filter Array).
14788 *
14789 * PARAMETERS : @input: black level pattern in the order of RGGB
14790 * @output: black level pattern in the order of CFA
14791 * @color_arrangement: CFA color arrangement
14792 *
14793 * RETURN : None
14794 *==========================================================================*/
14795template<typename T>
14796void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14797 T input[BLACK_LEVEL_PATTERN_CNT],
14798 T output[BLACK_LEVEL_PATTERN_CNT],
14799 cam_color_filter_arrangement_t color_arrangement)
14800{
14801 switch (color_arrangement) {
14802 case CAM_FILTER_ARRANGEMENT_GRBG:
14803 output[0] = input[1];
14804 output[1] = input[0];
14805 output[2] = input[3];
14806 output[3] = input[2];
14807 break;
14808 case CAM_FILTER_ARRANGEMENT_GBRG:
14809 output[0] = input[2];
14810 output[1] = input[3];
14811 output[2] = input[0];
14812 output[3] = input[1];
14813 break;
14814 case CAM_FILTER_ARRANGEMENT_BGGR:
14815 output[0] = input[3];
14816 output[1] = input[2];
14817 output[2] = input[1];
14818 output[3] = input[0];
14819 break;
14820 case CAM_FILTER_ARRANGEMENT_RGGB:
14821 output[0] = input[0];
14822 output[1] = input[1];
14823 output[2] = input[2];
14824 output[3] = input[3];
14825 break;
14826 default:
14827 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14828 break;
14829 }
14830}
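/*
 * Worked example (added for clarity; the numeric values are hypothetical): with
 * input = {R, Gr, Gb, B} in RGGB order and a GRBG color arrangement, the switch
 * above produces output = {Gr, R, B, Gb}:
 *
 *   float in[BLACK_LEVEL_PATTERN_CNT]  = {64.0f, 65.0f, 66.0f, 67.0f};
 *   float out[BLACK_LEVEL_PATTERN_CNT] = {};
 *   adjustBlackLevelForCFA(in, out, CAM_FILTER_ARRANGEMENT_GRBG);
 *   // out == {65.0f, 64.0f, 67.0f, 66.0f}
 */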
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014831
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014832void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14833 CameraMetadata &resultMetadata,
14834 std::shared_ptr<metadata_buffer_t> settings)
14835{
14836 if (settings == nullptr) {
14837 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14838 return;
14839 }
14840
14841 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14842 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014843 } else {
14844 resultMetadata.erase(ANDROID_JPEG_GPS_COORDINATES);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014845 }
14846
14847 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14848 String8 str((const char *)gps_methods);
14849 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014850 } else {
14851 resultMetadata.erase(ANDROID_JPEG_GPS_PROCESSING_METHOD);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014852 }
14853
14854 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14855 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014856 } else {
14857 resultMetadata.erase(ANDROID_JPEG_GPS_TIMESTAMP);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014858 }
14859
14860 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14861 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014862 } else {
14863 resultMetadata.erase(ANDROID_JPEG_ORIENTATION);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014864 }
14865
14866 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14867 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14868 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014869 } else {
14870 resultMetadata.erase(ANDROID_JPEG_QUALITY);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014871 }
14872
14873 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14874 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14875 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014876 } else {
14877 resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_QUALITY);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014878 }
14879
14880 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14881 int32_t fwk_thumb_size[2];
14882 fwk_thumb_size[0] = thumb_size->width;
14883 fwk_thumb_size[1] = thumb_size->height;
14884 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014885 } else {
14886 resultMetadata.erase(ANDROID_JPEG_THUMBNAIL_SIZE);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014887 }
14888
14889 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14890 uint8_t fwk_intent = intent[0];
14891 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014892 } else {
14893 resultMetadata.erase(ANDROID_CONTROL_CAPTURE_INTENT);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014894 }
14895}
14896
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014897bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14898 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014899 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14900 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14901 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14902 return false;
14903 }
14904
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014905 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14906 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14907 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014908 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014909 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014910 return false;
14911 }
14912
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014913 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014914 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14915 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014916 return false;
14917 }
14918
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014919 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14920 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14921 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14922 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14923 return false;
14924 }
14925
14926 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14927 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14928 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14929 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14930 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14931 return false;
14932 }
14933
14934 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14935 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14936 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14937 return false;
14938 }
14939
14940 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14941 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14942 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14943        ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE is not OFF.", __FUNCTION__);
14944 return false;
14945 }
14946
14947 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14948 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14949 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14950 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14951 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14952 return false;
14953 }
14954
Chien-Yu Chen4e9a8bd2017-09-21 16:02:55 -070014955 // TODO (b/66500626): support AE compensation.
14956 if (!metadata.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION) ||
14957 metadata.find(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0] != 0) {
14958 ALOGV("%s: ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION is not 0.", __FUNCTION__);
14959 return false;
14960 }
14961
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014962 // TODO (b/32585046): support non-ZSL.
14963 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14964 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14965 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14966 return false;
14967 }
14968
14969 // TODO (b/32586081): support flash.
14970 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14971 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14972 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14973 return false;
14974 }
14975
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014976 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14977 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14978 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14979 return false;
14980 }
14981
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014982 switch (request.output_buffers[0].stream->format) {
14983 case HAL_PIXEL_FORMAT_BLOB:
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014984 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14985 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014986 break;
14987 default:
14988            ALOGV("%s: Not an HDR+ request: Only JPEG and YUV outputs are supported.", __FUNCTION__);
14989            for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14990                ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
14991                        request.output_buffers[i].stream->width,
14992                        request.output_buffers[i].stream->height,
14993                        request.output_buffers[i].stream->format);
14994 }
14995 return false;
14996 }
14997
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014998 return true;
14999}
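/*
 * Illustrative sketch (added for clarity; not part of the original source): the
 * request settings that satisfy the checks above. The variable names are
 * hypothetical; the values simply mirror the conditions tested in this function.
 *
 *   CameraMetadata s;
 *   uint8_t nr  = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
 *   uint8_t eg  = ANDROID_EDGE_MODE_HIGH_QUALITY;
 *   uint8_t ab  = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
 *   uint8_t ae  = ANDROID_CONTROL_AE_MODE_ON;
 *   uint8_t awb = ANDROID_CONTROL_AWB_MODE_AUTO;
 *   uint8_t fx  = ANDROID_CONTROL_EFFECT_MODE_OFF;
 *   uint8_t cm  = ANDROID_CONTROL_MODE_AUTO;
 *   int32_t ec  = 0;
 *   uint8_t zsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
 *   uint8_t fl  = ANDROID_FLASH_MODE_OFF;
 *   uint8_t tm  = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
 *   s.update(ANDROID_NOISE_REDUCTION_MODE, &nr, 1);
 *   s.update(ANDROID_EDGE_MODE, &eg, 1);
 *   s.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &ab, 1);
 *   s.update(ANDROID_CONTROL_AE_MODE, &ae, 1);
 *   s.update(ANDROID_CONTROL_AWB_MODE, &awb, 1);
 *   s.update(ANDROID_CONTROL_EFFECT_MODE, &fx, 1);
 *   s.update(ANDROID_CONTROL_MODE, &cm, 1);
 *   s.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &ec, 1);
 *   s.update(ANDROID_CONTROL_ENABLE_ZSL, &zsl, 1);
 *   s.update(ANDROID_FLASH_MODE, &fl, 1);
 *   s.update(ANDROID_TONEMAP_MODE, &tm, 1);
 *   // The first output buffer of the request must also be BLOB, YCbCr_420_888,
 *   // or IMPLEMENTATION_DEFINED, and NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS
 *   // must not be set to 1.
 */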
15000
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015001void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
15002 if (hdrPlusRequest == nullptr) return;
15003
15004 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
15005 // Find the stream for this buffer.
15006 for (auto streamInfo : mStreamInfo) {
15007 if (streamInfo->id == outputBufferIter.first) {
15008 if (streamInfo->channel == mPictureChannel) {
15009 // For picture channel, this buffer is internally allocated so return this
15010 // buffer to picture channel.
15011 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
15012 } else {
15013 // Unregister this buffer for other channels.
15014 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
15015 }
15016 break;
15017 }
15018 }
15019 }
15020
15021 hdrPlusRequest->outputBuffers.clear();
15022 hdrPlusRequest->frameworkOutputBuffers.clear();
15023}
15024
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070015025bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
15026 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
15027 const CameraMetadata &metadata)
15028{
15029 if (hdrPlusRequest == nullptr) return false;
15030 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
15031
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015032 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015033 pbcamera::CaptureRequest pbRequest;
15034 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015035 // Iterate through all requested output buffers and add them to an HDR+ request.
15036 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
15037 // Find the index of the stream in mStreamInfo.
15038 uint32_t pbStreamId = 0;
15039 bool found = false;
15040 for (auto streamInfo : mStreamInfo) {
15041 if (streamInfo->stream == request.output_buffers[i].stream) {
15042 pbStreamId = streamInfo->id;
15043 found = true;
15044 break;
15045 }
15046 }
15047
15048 if (!found) {
15049 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
15050 abortPendingHdrplusRequest(hdrPlusRequest);
15051 return false;
15052 }
15053 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
15054 switch (request.output_buffers[i].stream->format) {
15055 case HAL_PIXEL_FORMAT_BLOB:
15056 {
15057 // For jpeg output, get a YUV buffer from pic channel.
15058 QCamera3PicChannel *picChannel =
15059 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15060 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15061 if (res != OK) {
15062 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15063 __FUNCTION__, strerror(-res), res);
15064 abortPendingHdrplusRequest(hdrPlusRequest);
15065 return false;
15066 }
15067 break;
15068 }
15069 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15070 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15071 {
15072 // For YUV output, register the buffer and get the buffer def from the channel.
15073 QCamera3ProcessingChannel *channel =
15074 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15075 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15076 outBuffer.get());
15077 if (res != OK) {
15078 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15079 strerror(-res), res);
15080 abortPendingHdrplusRequest(hdrPlusRequest);
15081 return false;
15082 }
15083 break;
15084 }
15085 default:
15086 abortPendingHdrplusRequest(hdrPlusRequest);
15087 return false;
15088 }
15089
15090 pbcamera::StreamBuffer buffer;
15091 buffer.streamId = pbStreamId;
15092 buffer.dmaBufFd = outBuffer->fd;
15093 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15094 buffer.dataSize = outBuffer->frame_len;
15095
15096 pbRequest.outputBuffers.push_back(buffer);
15097
15098 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15099 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15100 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015101
15102 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015103 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015104 if (res != OK) {
15105 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15106 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015107 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015108 return false;
15109 }
15110
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015111 return true;
15112}
15113
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015114status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15115{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015116 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15117 return OK;
15118 }
15119
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015120 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015121 if (res != OK) {
15122 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15123 strerror(-res), res);
15124 return res;
15125 }
15126 gHdrPlusClientOpening = true;
15127
15128 return OK;
15129}
15130
Chien-Yu Chenee335912017-02-09 17:53:20 -080015131status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15132{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015133 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015134
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015135 if (mHdrPlusModeEnabled) {
15136 return OK;
15137 }
15138
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015139 // Check if gHdrPlusClient is opened or being opened.
15140 if (gHdrPlusClient == nullptr) {
15141 if (gHdrPlusClientOpening) {
15142 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15143 return OK;
15144 }
15145
15146 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015147 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015148 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15149 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015150 return res;
15151 }
15152
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015153 // When opening HDR+ client completes, HDR+ mode will be enabled.
15154 return OK;
15155
Chien-Yu Chenee335912017-02-09 17:53:20 -080015156 }
15157
15158 // Configure stream for HDR+.
15159 res = configureHdrPlusStreamsLocked();
15160 if (res != OK) {
15161 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015162 return res;
15163 }
15164
15165 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15166 res = gHdrPlusClient->setZslHdrPlusMode(true);
15167 if (res != OK) {
15168 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015169 return res;
15170 }
15171
15172 mHdrPlusModeEnabled = true;
15173 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15174
15175 return OK;
15176}
15177
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015178void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15179{
15180 if (gHdrPlusClientOpening) {
15181 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15182 }
15183}
15184
Chien-Yu Chenee335912017-02-09 17:53:20 -080015185void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15186{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015187 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015188 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015189 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15190 if (res != OK) {
15191 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15192 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015193
15194 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015195 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015196 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015197 }
15198
15199 mHdrPlusModeEnabled = false;
15200 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15201}
15202
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015203bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15204{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015205 // Check that at least one YUV or one JPEG output is configured.
15206 // TODO: Support RAW (b/36690506)
15207 for (auto streamInfo : mStreamInfo) {
15208 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15209 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15210 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15211 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15212 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15213 return true;
15214 }
15215 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015216 }
15217
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015218 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015219}
15220
Chien-Yu Chenee335912017-02-09 17:53:20 -080015221status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015222{
15223 pbcamera::InputConfiguration inputConfig;
15224 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15225 status_t res = OK;
15226
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015227 // Sensor MIPI will send data to Easel.
15228 inputConfig.isSensorInput = true;
15229 inputConfig.sensorMode.cameraId = mCameraId;
15230 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15231 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15232 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15233 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15234 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15235 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenc8b6ad02017-09-15 13:50:26 -070015236 inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
15237
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015238 if (mSensorModeInfo.num_raw_bits != 10) {
15239 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15240 mSensorModeInfo.num_raw_bits);
15241 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015242 }
15243
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015244 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015245
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015246 // Iterate through configured output streams in HAL and configure those streams in HDR+
15247 // service.
15248 for (auto streamInfo : mStreamInfo) {
15249 pbcamera::StreamConfiguration outputConfig;
15250 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15251 switch (streamInfo->stream->format) {
15252 case HAL_PIXEL_FORMAT_BLOB:
15253 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15254 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15255 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15256 streamInfo->channel, /*stream index*/0);
15257 if (res != OK) {
15258                        LOGE("%s: Failed to fill stream config for YUV stream: %s (%d)",
15259 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015260
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015261 return res;
15262 }
15263
15264 outputStreamConfigs.push_back(outputConfig);
15265 break;
15266 default:
15267 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15268 break;
15269 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015270 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015271 }
15272
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015273 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015274 if (res != OK) {
15275        LOGE("%s: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15276 strerror(-res), res);
15277 return res;
15278 }
15279
15280 return OK;
15281}
15282
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015283void QCamera3HardwareInterface::handleEaselFatalError()
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015284{
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015285 pthread_mutex_lock(&mMutex);
15286 mState = ERROR;
15287 pthread_mutex_unlock(&mMutex);
15288
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015289 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015290}
15291
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015292void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15293{
15294 if (mEaselErrorFuture.valid()) {
15295        // The fatal error handler has already been launched.
15296 return;
15297 }
15298
15299 // Launch a future to handle the fatal error.
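    // Note (added for clarity): launching this on a separate thread is presumably done so the
    // Easel error callback does not block while handleEaselFatalError() acquires mMutex and
    // performs the full device-error teardown.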
15300 mEaselErrorFuture = std::async(std::launch::async,
15301 &QCamera3HardwareInterface::handleEaselFatalError, this);
15302}
15303
15304void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15305{
15306 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15307 handleEaselFatalErrorAsync();
15308}
15309
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015310void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15311{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015312 int rc = NO_ERROR;
15313
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015314 if (client == nullptr) {
15315 ALOGE("%s: Opened client is null.", __FUNCTION__);
15316 return;
15317 }
15318
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015319 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015320 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15321
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015322 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015323 if (!gHdrPlusClientOpening) {
15324 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15325 return;
15326 }
15327
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015328 gHdrPlusClient = std::move(client);
15329 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015330 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015331
15332 // Set static metadata.
15333 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15334 if (res != OK) {
15335 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15336 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015337 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015338 gHdrPlusClient = nullptr;
15339 return;
15340 }
15341
15342 // Enable HDR+ mode.
15343 res = enableHdrPlusModeLocked();
15344 if (res != OK) {
15345 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15346 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015347
15348 // Get Easel firmware version
15349 if (EaselManagerClientOpened) {
15350 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15351 if (rc != OK) {
15352 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15353 } else {
15354 mEaselFwUpdated = true;
15355 }
15356 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015357}
15358
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015359void QCamera3HardwareInterface::onOpenFailed(status_t err)
15360{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015361 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015362 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015363 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015364 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015365}
15366
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015367void QCamera3HardwareInterface::onFatalError()
15368{
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015369 ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
15370 handleEaselFatalErrorAsync();
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015371}
15372
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015373void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15374{
15375 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15376 __LINE__, requestId, apSensorTimestampNs);
15377
15378 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15379}
15380
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015381void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15382{
15383 pthread_mutex_lock(&mMutex);
15384
15385 // Find the pending request for this result metadata.
15386 auto requestIter = mPendingRequestsList.begin();
15387 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15388 requestIter++;
15389 }
15390
15391 if (requestIter == mPendingRequestsList.end()) {
15392 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15393 pthread_mutex_unlock(&mMutex);
15394 return;
15395 }
15396
15397 requestIter->partial_result_cnt++;
15398
15399 CameraMetadata metadata;
15400 uint8_t ready = true;
15401 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15402
15403 // Send it to framework.
15404 camera3_capture_result_t result = {};
15405
15406 result.result = metadata.getAndLock();
15407 // Populate metadata result
15408 result.frame_number = requestId;
15409 result.num_output_buffers = 0;
15410 result.output_buffers = NULL;
15411 result.partial_result = requestIter->partial_result_cnt;
15412
15413 orchestrateResult(&result);
15414 metadata.unlock(result.result);
15415
15416 pthread_mutex_unlock(&mMutex);
15417}
15418
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015419void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15420 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15421 uint32_t stride, int32_t format)
15422{
15423 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15424 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15425 __LINE__, width, height, requestId);
15426 char buf[FILENAME_MAX] = {};
15427 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15428 requestId, width, height);
15429
15430 pbcamera::StreamConfiguration config = {};
15431 config.image.width = width;
15432 config.image.height = height;
15433 config.image.format = format;
15434
15435 pbcamera::PlaneConfiguration plane = {};
15436 plane.stride = stride;
15437 plane.scanline = height;
15438
15439 config.image.planes.push_back(plane);
15440
15441 pbcamera::StreamBuffer buffer = {};
15442 buffer.streamId = 0;
15443 buffer.dmaBufFd = -1;
15444 buffer.data = postview->data();
15445 buffer.dataSize = postview->size();
15446
15447 hdrplus_client_utils::writePpm(buf, config, buffer);
15448 }
15449
15450 pthread_mutex_lock(&mMutex);
15451
15452 // Find the pending request for this result metadata.
15453 auto requestIter = mPendingRequestsList.begin();
15454 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15455 requestIter++;
15456 }
15457
15458 if (requestIter == mPendingRequestsList.end()) {
15459 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15460 pthread_mutex_unlock(&mMutex);
15461 return;
15462 }
15463
15464 requestIter->partial_result_cnt++;
15465
15466 CameraMetadata metadata;
15467 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15468 static_cast<int32_t>(stride)};
15469 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15470 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15471
15472 // Send it to framework.
15473 camera3_capture_result_t result = {};
15474
15475 result.result = metadata.getAndLock();
15476 // Populate metadata result
15477 result.frame_number = requestId;
15478 result.num_output_buffers = 0;
15479 result.output_buffers = NULL;
15480 result.partial_result = requestIter->partial_result_cnt;
15481
15482 orchestrateResult(&result);
15483 metadata.unlock(result.result);
15484
15485 pthread_mutex_unlock(&mMutex);
15486}
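/*
 * Debug note (added for clarity): the postview dump above is gated by a property,
 * so during development it can be enabled with:
 *
 *   adb shell setprop persist.camera.hdrplus.dump_postview true
 *
 * after which each HDR+ request writes postview_<requestId>_<w>x<h>.ppm under
 * QCAMERA_DUMP_FRM_LOCATION.
 */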
15487
void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
        const camera_metadata_t &resultMetadata)
{
    if (result == nullptr) {
        ALOGE("%s: result is nullptr.", __FUNCTION__);
        return;
    }

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(result->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, result->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    // Update the result metadata with the settings of the HDR+ still capture request because
    // the result metadata belongs to a ZSL buffer.
    CameraMetadata metadata;
    metadata = &resultMetadata;
    updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
    camera_metadata_t* updatedResultMetadata = metadata.release();

    uint32_t halSnapshotStreamId = 0;
    if (mPictureChannel != nullptr) {
        halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
    }

    auto halMetadata = std::make_shared<metadata_buffer_t>();
    clear_metadata_buffer(halMetadata.get());

    // Convert the updated result metadata to HAL metadata.
    status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
            halSnapshotStreamId, /*minFrameDuration*/0);
    if (res != 0) {
        ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
    }

    for (auto &outputBuffer : result->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        // Check whether to dump the buffer.
        if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
                frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
            // If the stream format is YUV or JPEG, check if dumping HDR+ YUV output is enabled.
            char prop[PROPERTY_VALUE_MAX];
            property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
            bool dumpYuvOutput = atoi(prop);

            if (dumpYuvOutput) {
                // Dump the YUV buffer to a ppm file.
                pbcamera::StreamConfiguration outputConfig;
                status_t rc = fillPbStreamConfig(&outputConfig, streamId,
                        channel, /*stream index*/0);
                if (rc == OK) {
                    char buf[FILENAME_MAX] = {};
                    snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                            result->requestId, streamId,
                            outputConfig.image.width, outputConfig.image.height);

                    hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
                } else {
                    LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
                            "%s (%d).", __FUNCTION__, strerror(-rc), rc);
                }
            }
        }

        if (channel == mPictureChannel) {
            // Return the buffer to the picture channel for encoding.
            mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
                    frameworkOutputBuffer->buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer to the camera framework.
            pthread_mutex_lock(&mMutex);
            handleBufferWithLock(frameworkOutputBuffer, result->requestId);
            channel->unregisterBuffer(outputBufferDef.get());
            pthread_mutex_unlock(&mMutex);
        }
    }

    // Send HDR+ metadata to the framework.
    {
        pthread_mutex_lock(&mMutex);

        // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
        handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
        pthread_mutex_unlock(&mMutex);
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(result->requestId);
        mHdrPlusPendingRequests.erase(req);
    }
}

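// Handles a failed HDR+ capture result: returns the output buffers to their channels,
// notifies the framework of buffer errors for the request, and removes both the
// pending HDR+ request and the corresponding pending HAL request.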
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to the picture channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out an error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out the result with buffer errors.
        orchestrateResult(&result);

        // Remove the pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove the pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}


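// ShutterDispatcher collects shutter timestamps for in-flight requests and sends
// CAMERA3_MSG_SHUTTER notifications to the framework in frame-number order, tracking
// regular and reprocess requests in separate queues.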
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

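// Registers a not-yet-ready shutter entry for a frame number so its notification can
// later be dispatched in order. Reprocess requests are tracked in a separate queue.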
void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

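// Marks the shutter for a frame number as ready with the given timestamp and sends
// out all consecutive ready shutters, stopping at the first one that is not ready.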
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    if (shutter->second.ready) {
        // If the shutter is already ready, don't update the timestamp again.
        return;
    }

    // Make this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

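// Removes any pending shutter (regular or reprocess) for a single frame number.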
void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

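// Drops all pending shutters, regular and reprocess. Any entry still present at this
// point is stale and is logged as an error.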
void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

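// OutputBufferDispatcher tracks the expected output buffers of each configured stream
// and returns them to the framework in frame-number order, holding back buffers whose
// predecessors on the same stream are not ready yet.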
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

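// Resets the per-stream buffer tracking for a new stream configuration; expectBuffer()
// rejects streams that were not configured here.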
status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

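// Registers a not-yet-ready buffer entry for a frame number on a configured stream so
// the buffer can later be dispatched in order by markBufferReady().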
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

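// Marks the buffer for a frame number on a stream as ready and sends out all
// consecutive ready buffers on that stream, stopping at the first one that is not
// ready.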
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result with the ready buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

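// Drops all pending buffers, logging any stale entries; if clearConfiguredStreams is
// true, the configured stream entries are dropped as well.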
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera