/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH  3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS        1
#define MAX_STALLING_STREAMS   1
#define MAX_PROCESSED_STREAMS  3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR    (30)
#define DEFAULT_VIDEO_FPS      (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE     (8)
#define REGIONS_TUPLE_COUNT    5
// Threshold for detection of missing buffers, in seconds
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

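// Post-processing feature superset for HAL3 processed streams (2D denoise, crop, rotation,
// sharpness, scale, CAC and CDS bundled into a single mask).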
#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT   0
#define FACE_TOP    1
#define FACE_RIGHT  2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X  0
#define LEFT_EYE_Y  1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X     4
#define MOUTH_Y     5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

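// Per-camera capability and static metadata tables indexed by camera id, plus session
// bookkeeping shared across HAL instances (gCamLock and gNumCameraSessions are defined
// externally and only referenced here).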
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF,  CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF,       CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO,       CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE,   CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE,   CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA,      CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE,  CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA,       CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF,             CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO,            CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT,    CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT,     CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT,        CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT,        CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE,           CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY,  CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION,         CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT,       CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE,      CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT,          CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE,        CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH,          CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW,           CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET,         CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO,    CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS ,     CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS ,        CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY,          CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT,    CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE,        CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR,            CAM_SCENE_MODE_HDR}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF,                CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO,               CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO,              CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF,               CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO,   CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF,  CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF,                  CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON,                   CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH,        CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH,      CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF,    CAM_FLASH_MODE_OFF  },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH,  CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF,    CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL,   CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING,     CAM_AF_LENS_STATE_MOVING}
};

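// JPEG thumbnail sizes advertised in static metadata, as (width, height) pairs; the leading
// (0, 0) entry denotes "no thumbnail".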
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF,          CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR,  CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS,   CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9,          CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1,      CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * Also, the order in this list is important because while mapping from HAL to Android it will
 * traverse from lower to higher index, which means that for HAL values that map to different
 * Android values, the traverse logic will select the first one found.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT,            CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT,   CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A,             CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55,                    CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65,                    CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75,                    CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50,                    CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN,    CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT,               CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN,               CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER,           CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER,         CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE,                  CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT,  CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT,      CAM_AWB_COLD_FLO},
};

const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize                         = QCamera3HardwareInterface::initialize,
    .configure_streams                  = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers            = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request            = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops        = NULL,
    .dump                               = QCamera3HardwareInterface::dump,
    .flush                              = QCamera3HardwareInterface::flush,
    .reserved                           = {0},
};

// Initialise each slot to an invalid session id; closeCamera() resets a slot back to this
// 0xDEADBEEF sentinel when its session ends.
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

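// Logs an Easel profiling event with a CLOCK_BOOTTIME timestamp in milliseconds; this is a
// no-op unless gEaselProfilingEnabled is set, e.g.
// logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open") as used in openCamera() below.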
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger()
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
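    // Advertise device API level 3.4 by default; builds that define USE_HAL_3_3 fall back to 3.3.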
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcode for now until mctl add support for min_num_pp_bufs
    //TBD - To see if this hardcoding is needed. Check by printing if this is filled by mctl to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //       this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
                mChannelHandle, /*stop_immediately*/false);
        LOGD("stopping channel %d", mChannelHandle);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    //Notify display HAL that a camera session is active.
    //But avoid calling the same during bootup because camera service might open/close
    //cameras at boot time during its initialization and display service will also internally
    //wait for camera service to initialize first while calling this display API, resulting in a
    //deadlock situation. Since boot time camera open/close calls are made only to fetch
    //capabilities, no need of this display bw optimization.
    //Use "service.bootanim.exit" property to know boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get sessiion id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              non-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }

        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->stopMipi(mCameraId);
            if (rc != 0) {
                ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }

            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParamters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }
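    // depthWidth/depthHeight above hold the PDAF stat buffer dimensions and are used below to
    // validate RAW16 and BLOB streams that carry HAL_DATASPACE_DEPTH.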
Thierry Strudel3d639192016-09-09 11:52:26 -07001215
1216 camera3_stream_t *inputStream = NULL;
1217 /*
1218 * Loop through all streams to find input stream if it exists*
1219 */
1220 for (size_t i = 0; i< streamList->num_streams; i++) {
1221 if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
1222 if (inputStream != NULL) {
1223 LOGE("Error, Multiple input streams requested");
1224 return -EINVAL;
1225 }
1226 inputStream = streamList->streams[i];
1227 }
1228 }
1229 /*
1230 * Loop through all streams requested in configuration
1231 * Check if unsupported sizes have been requested on any of them
1232 */
1233 for (size_t j = 0; j < streamList->num_streams; j++) {
1234 bool sizeFound = false;
1235 camera3_stream_t *newStream = streamList->streams[j];
1236
1237 uint32_t rotatedHeight = newStream->height;
1238 uint32_t rotatedWidth = newStream->width;
1239 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
1240 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
1241 rotatedHeight = newStream->width;
1242 rotatedWidth = newStream->height;
1243 }
1244
1245 /*
1246 * Sizes are different for each type of stream format check against
1247 * appropriate table.
1248 */
1249 switch (newStream->format) {
1250 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
1251 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
1252 case HAL_PIXEL_FORMAT_RAW10:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001253 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
1254 (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
1255 mPDSupported) {
1256 if ((depthWidth == newStream->width) &&
1257 (depthHeight == newStream->height)) {
1258 sizeFound = true;
1259 }
1260 break;
1261 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001262 count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
1263 for (size_t i = 0; i < count; i++) {
1264 if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
1265 (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
1266 sizeFound = true;
1267 break;
1268 }
1269 }
1270 break;
1271 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev0f3c3162017-03-15 12:57:46 +00001272 if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
1273 mPDSupported) {
Emilian Peev7650c122017-01-19 08:24:33 -08001274 //As per spec. depth cloud should be sample count / 16
Emilian Peev0f3c3162017-03-15 12:57:46 +00001275 uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
Emilian Peev7650c122017-01-19 08:24:33 -08001276 if ((depthSamplesCount == newStream->width) &&
1277 (1 == newStream->height)) {
1278 sizeFound = true;
1279 }
1280 break;
1281 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001282 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
1283 /* Verify set size against generated sizes table */
1284 for (size_t i = 0; i < count; i++) {
1285 if (((int32_t)rotatedWidth ==
1286 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1287 ((int32_t)rotatedHeight ==
1288 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1289 sizeFound = true;
1290 break;
1291 }
1292 }
1293 break;
1294 case HAL_PIXEL_FORMAT_YCbCr_420_888:
1295 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
1296 default:
1297 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
1298 || newStream->stream_type == CAMERA3_STREAM_INPUT
1299 || IS_USAGE_ZSL(newStream->usage)) {
1300 if (((int32_t)rotatedWidth ==
1301 gCamCapability[mCameraId]->active_array_size.width) &&
1302 ((int32_t)rotatedHeight ==
1303 gCamCapability[mCameraId]->active_array_size.height)) {
1304 sizeFound = true;
1305 break;
1306 }
1307                /* We could potentially break here to enforce that a ZSL stream
1308                 * set from the framework is always full active array size, but it
1309                 * is not clear from the spec whether the framework will always
1310                 * follow that. We also have logic to override to full array
1311                 * size, so keep the check lenient for now.
1312 */
1313 }
1314 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1315 MAX_SIZES_CNT);
1316 for (size_t i = 0; i < count; i++) {
1317 if (((int32_t)rotatedWidth ==
1318 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1319 ((int32_t)rotatedHeight ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1321 sizeFound = true;
1322 break;
1323 }
1324 }
1325 break;
1326 } /* End of switch(newStream->format) */
1327
1328 /* We error out even if a single stream has unsupported size set */
1329 if (!sizeFound) {
1330 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1331 rotatedWidth, rotatedHeight, newStream->format,
1332 gCamCapability[mCameraId]->active_array_size.width,
1333 gCamCapability[mCameraId]->active_array_size.height);
1334 rc = -EINVAL;
1335 break;
1336 }
1337 } /* End of for each stream */
1338 return rc;
1339}
1340
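/*
 * Illustrative sketch (not part of the build; the PD sensor dimensions are
 * assumed purely for the example): how the depth stream size checks above
 * work out numerically.
 *
 *   depthWidth = 480, depthHeight = 240
 *   depthSamplesCount = (480 * 240 * 2) / 16 = 14400
 *
 * A depth BLOB stream therefore only passes validation when configured as
 * 14400 x 1, while a RAW16 depth stream must match 480 x 240 exactly.
 */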
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001341/*===========================================================================
1342 * FUNCTION : validateUsageFlags
1343 *
1344 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1345 *
1346 * PARAMETERS :
1347 * @stream_list : streams to be configured
1348 *
1349 * RETURN :
1350 * NO_ERROR if the usage flags are supported
1351 * error code if usage flags are not supported
1352 *
1353 *==========================================================================*/
1354int QCamera3HardwareInterface::validateUsageFlags(
1355 const camera3_stream_configuration_t* streamList)
1356{
1357 for (size_t j = 0; j < streamList->num_streams; j++) {
1358 const camera3_stream_t *newStream = streamList->streams[j];
1359
1360 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1361 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1362 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1363 continue;
1364 }
1365
Jason Leec4cf5032017-05-24 18:31:41 -07001366 // Here we only care whether it's EIS3 or not
1367 char is_type_value[PROPERTY_VALUE_MAX];
1368 property_get("persist.camera.is_type", is_type_value, "4");
1369 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1370 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1371 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1372 isType = IS_TYPE_NONE;
1373
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001374 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1375 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1376 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1377 bool forcePreviewUBWC = true;
1378 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1379 forcePreviewUBWC = false;
1380 }
1381 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001382 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001383 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387
1388 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1389 // So color spaces will always match.
1390
1391 // Check whether underlying formats of shared streams match.
1392 if (isVideo && isPreview && videoFormat != previewFormat) {
1393 LOGE("Combined video and preview usage flag is not supported");
1394 return -EINVAL;
1395 }
1396 if (isPreview && isZSL && previewFormat != zslFormat) {
1397 LOGE("Combined preview and zsl usage flag is not supported");
1398 return -EINVAL;
1399 }
1400 if (isVideo && isZSL && videoFormat != zslFormat) {
1401 LOGE("Combined video and zsl usage flag is not supported");
1402 return -EINVAL;
1403 }
1404 }
1405 return NO_ERROR;
1406}
1407
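/*
 * Usage sketch (mirrors how configureStreamsPerfLocked() calls this helper):
 * validate the usage flags before any channel is created, so that a stream
 * combination mapping to different internal formats is rejected up front.
 *
 *   int rc = validateUsageFlags(streamList);
 *   if (rc != NO_ERROR) {
 *       return rc;
 *   }
 */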
1408/*===========================================================================
1409 * FUNCTION : validateUsageFlagsForEis
1410 *
1411 * DESCRIPTION: Check if the configuration usage flags conflict with Eis
1412 *
1413 * PARAMETERS :
1414 * @stream_list : streams to be configured
1415 *
1416 * RETURN :
1417 * NO_ERROR if the usage flags are supported
1418 * error code if usage flags are not supported
1419 *
1420 *==========================================================================*/
1421int QCamera3HardwareInterface::validateUsageFlagsForEis(
1422 const camera3_stream_configuration_t* streamList)
1423{
1424 for (size_t j = 0; j < streamList->num_streams; j++) {
1425 const camera3_stream_t *newStream = streamList->streams[j];
1426
1427 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1428 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1429
1430        // Because EIS is "hard-coded" for certain use cases, and the current
1431 // implementation doesn't support shared preview and video on the same
1432 // stream, return failure if EIS is forced on.
1433 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1434 LOGE("Combined video and preview usage flag is not supported due to EIS");
1435 return -EINVAL;
1436 }
1437 }
1438 return NO_ERROR;
1439}
1440
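/*
 * Example of a configuration this check rejects (stream parameters are
 * hypothetical): a stream whose gralloc usage carries both the preview and
 * the video-encoder bits, i.e. IS_USAGE_PREVIEW(usage) && IS_USAGE_VIDEO(usage),
 * while m_bEisEnable and m_bEisSupportedSize are both true, results in -EINVAL.
 */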
Thierry Strudel3d639192016-09-09 11:52:26 -07001441/*==============================================================================
1442 * FUNCTION : isSupportChannelNeeded
1443 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1444 * DESCRIPTION: Simple heuristic func to determine if support channels is needed
1445 *
1446 * PARAMETERS :
1447 * @stream_list : streams to be configured
1448 * @stream_config_info : the config info for streams to be configured
1449 *
1450 * RETURN : Boolean true/false decision
1451 *
1452 *==========================================================================*/
1453bool QCamera3HardwareInterface::isSupportChannelNeeded(
1454 camera3_stream_configuration_t *streamList,
1455 cam_stream_size_info_t stream_config_info)
1456{
1457 uint32_t i;
1458 bool pprocRequested = false;
1459 /* Check for conditions where PProc pipeline does not have any streams*/
1460 for (i = 0; i < stream_config_info.num_streams; i++) {
1461 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1462 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1463 pprocRequested = true;
1464 break;
1465 }
1466 }
1467
1468 if (pprocRequested == false )
1469 return true;
1470
1471 /* Dummy stream needed if only raw or jpeg streams present */
1472 for (i = 0; i < streamList->num_streams; i++) {
1473 switch(streamList->streams[i]->format) {
1474 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1475 case HAL_PIXEL_FORMAT_RAW10:
1476 case HAL_PIXEL_FORMAT_RAW16:
1477 case HAL_PIXEL_FORMAT_BLOB:
1478 break;
1479 default:
1480 return false;
1481 }
1482 }
1483 return true;
1484}
1485
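/*
 * Examples (hypothetical stream sets) of how the heuristic above resolves:
 *  - Only a RAW16 stream and a BLOB (JPEG) stream      -> true, a dummy
 *    support channel is needed since no stream runs on the PProc path.
 *  - The same set plus an IMPLEMENTATION_DEFINED preview stream -> false.
 *  - No stream with a non-NONE postprocess mask at all -> true.
 */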
1486/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001487 * FUNCTION : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001488 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
1491 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001492 * @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001493 *
1494 * RETURN : int32_t type of status
1495 * NO_ERROR -- success
1496 * non-zero failure code
1497 *
1498 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001499int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001500{
1501 int32_t rc = NO_ERROR;
1502
1503 cam_dimension_t max_dim = {0, 0};
1504 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1505 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1506 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1507 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1508 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1509 }
1510
1511 clear_metadata_buffer(mParameters);
1512
1513 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1514 max_dim);
1515 if (rc != NO_ERROR) {
1516 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1517 return rc;
1518 }
1519
1520 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1521 if (rc != NO_ERROR) {
1522 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1523 return rc;
1524 }
1525
1526 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001527 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001528
1529 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1530 mParameters);
1531 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001532 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001533 return rc;
1534 }
1535
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001536 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001537 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1538 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1539 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1540 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1541 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001542
1543 return rc;
1544}
1545
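/*
 * Calling-sequence sketch (assumption, for illustration only): this getter is
 * meaningful once mStreamConfigInfo is populated, since the backend selects
 * the sensor mode from the max dimension programmed above.
 *
 *   cam_sensor_mode_info_t modeInfo = {};
 *   if (getSensorModeInfo(modeInfo) == NO_ERROR) {
 *       // modeInfo.active_array_size, op_pixel_clk and num_raw_bits describe
 *       // the mode the backend would pick for this configuration.
 *   }
 */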
1546/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001547 * FUNCTION : getCurrentSensorModeInfo
1548 *
1549 * DESCRIPTION: Get sensor mode information that is currently selected.
1550 *
1551 * PARAMETERS :
1552 * @sensorModeInfo : sensor mode information (output)
1553 *
1554 * RETURN : int32_t type of status
1555 * NO_ERROR -- success
1556 * non-zero failure code
1557 *
1558 *==========================================================================*/
1559int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1560{
1561 int32_t rc = NO_ERROR;
1562
1563 clear_metadata_buffer(mParameters);
1564 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1565
1566 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1567 mParameters);
1568 if (rc != NO_ERROR) {
1569        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1570 return rc;
1571 }
1572
1573 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1574 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1575 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1576 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1577 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1578 sensorModeInfo.num_raw_bits);
1579
1580 return rc;
1581}
1582
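/*
 * Note on the two getters above: getSensorModeInfo() first programs
 * CAM_INTF_PARM_MAX_DIMENSION and then queries the mode the backend would
 * choose for that dimension, whereas getCurrentSensorModeInfo() only reads
 * back the mode that is already selected, without touching any parameters.
 */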
1583/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001584 * FUNCTION : addToPPFeatureMask
1585 *
1586 * DESCRIPTION: add additional features to pp feature mask based on
1587 * stream type and usecase
1588 *
1589 * PARAMETERS :
1590 * @stream_format : stream type for feature mask
1591 * @stream_idx : stream idx within postprocess_mask list to change
1592 *
1593 * RETURN : NULL
1594 *
1595 *==========================================================================*/
1596void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1597 uint32_t stream_idx)
1598{
1599 char feature_mask_value[PROPERTY_VALUE_MAX];
1600 cam_feature_mask_t feature_mask;
1601 int args_converted;
1602 int property_len;
1603
1604 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001605#ifdef _LE_CAMERA_
1606 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1607 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1608 property_len = property_get("persist.camera.hal3.feature",
1609 feature_mask_value, swtnr_feature_mask_value);
1610#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001611 property_len = property_get("persist.camera.hal3.feature",
1612 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001613#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001614 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1615 (feature_mask_value[1] == 'x')) {
1616 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1617 } else {
1618 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1619 }
1620 if (1 != args_converted) {
1621 feature_mask = 0;
1622 LOGE("Wrong feature mask %s", feature_mask_value);
1623 return;
1624 }
1625
1626 switch (stream_format) {
1627 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1628 /* Add LLVD to pp feature mask only if video hint is enabled */
1629 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1630 mStreamConfigInfo.postprocess_mask[stream_idx]
1631 |= CAM_QTI_FEATURE_SW_TNR;
1632 LOGH("Added SW TNR to pp feature mask");
1633 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1634 mStreamConfigInfo.postprocess_mask[stream_idx]
1635 |= CAM_QCOM_FEATURE_LLVD;
1636 LOGH("Added LLVD SeeMore to pp feature mask");
1637 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001638 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1639 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1640 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1641 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001642 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1643 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1644 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1645 CAM_QTI_FEATURE_BINNING_CORRECTION;
1646 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001647 break;
1648 }
1649 default:
1650 break;
1651 }
1652 LOGD("PP feature mask %llx",
1653 mStreamConfigInfo.postprocess_mask[stream_idx]);
1654}
1655
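/*
 * Property format sketch (the values are examples only): the override mask in
 * persist.camera.hal3.feature is accepted either as hex or as decimal, per the
 * sscanf() calls above.
 *
 *   adb shell setprop persist.camera.hal3.feature 0x100   # parsed via "0x%llx"
 *   adb shell setprop persist.camera.hal3.feature 256     # parsed via "%lld"
 *
 * A value that does not convert cleanly resets the mask to 0 and logs
 * "Wrong feature mask".
 */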
1656/*==============================================================================
1657 * FUNCTION : updateFpsInPreviewBuffer
1658 *
1659 * DESCRIPTION: update FPS information in preview buffer.
1660 *
1661 * PARAMETERS :
1662 * @metadata : pointer to metadata buffer
1663 * @frame_number: frame_number to look for in pending buffer list
1664 *
1665 * RETURN : None
1666 *
1667 *==========================================================================*/
1668void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1669 uint32_t frame_number)
1670{
1671 // Mark all pending buffers for this particular request
1672 // with corresponding framerate information
1673 for (List<PendingBuffersInRequest>::iterator req =
1674 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1675 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1676 for(List<PendingBufferInfo>::iterator j =
1677 req->mPendingBufferList.begin();
1678 j != req->mPendingBufferList.end(); j++) {
1679 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1680 if ((req->frame_number == frame_number) &&
1681 (channel->getStreamTypeMask() &
1682 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1683 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1684 CAM_INTF_PARM_FPS_RANGE, metadata) {
1685 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1686 struct private_handle_t *priv_handle =
1687 (struct private_handle_t *)(*(j->buffer));
1688 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1689 }
1690 }
1691 }
1692 }
1693}
1694
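/*
 * Behaviour sketch (illustrative numbers): if the metadata of frame N carries
 * CAM_INTF_PARM_FPS_RANGE with max_fps = 30, every pending preview buffer of
 * frame N gets refreshrate = 30 written into its private handle via
 * setMetaData(..., UPDATE_REFRESH_RATE, ...), presumably as a refresh-rate
 * hint for the display pipeline.
 */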
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001695/*==============================================================================
1696 * FUNCTION : updateTimeStampInPendingBuffers
1697 *
1698 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1699 * of a frame number
1700 *
1701 * PARAMETERS :
1702 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1703 * @timestamp : timestamp to be set
1704 *
1705 * RETURN : None
1706 *
1707 *==========================================================================*/
1708void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1709 uint32_t frameNumber, nsecs_t timestamp)
1710{
1711 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1712 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001713 // WAR: save the av_timestamp to the next frame
1714 if(req->frame_number == frameNumber + 1) {
1715 req->av_timestamp = timestamp;
1716 }
1717
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001718 if (req->frame_number != frameNumber)
1719 continue;
1720
1721 for (auto k = req->mPendingBufferList.begin();
1722 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001723 // WAR: update timestamp when it's not VT usecase
1724 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1725 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1726 m_bAVTimerEnabled)) {
1727 struct private_handle_t *priv_handle =
1728 (struct private_handle_t *) (*(k->buffer));
1729 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1730 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001731 }
1732 }
1733 return;
1734}
1735
Thierry Strudel3d639192016-09-09 11:52:26 -07001736/*===========================================================================
1737 * FUNCTION : configureStreams
1738 *
1739 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1740 * and output streams.
1741 *
1742 * PARAMETERS :
1743 * @stream_list : streams to be configured
1744 *
1745 * RETURN :
1746 *
1747 *==========================================================================*/
1748int QCamera3HardwareInterface::configureStreams(
1749 camera3_stream_configuration_t *streamList)
1750{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001751 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001752 int rc = 0;
1753
1754    // Acquire perfLock before configuring streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001755 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001756 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758
1759 return rc;
1760}
1761
1762/*===========================================================================
1763 * FUNCTION : configureStreamsPerfLocked
1764 *
1765 * DESCRIPTION: configureStreams while perfLock is held.
1766 *
1767 * PARAMETERS :
1768 * @stream_list : streams to be configured
1769 *
1770 * RETURN : int32_t type of status
1771 * NO_ERROR -- success
1772 * non-zero failure code
1773 *==========================================================================*/
1774int QCamera3HardwareInterface::configureStreamsPerfLocked(
1775 camera3_stream_configuration_t *streamList)
1776{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001777 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001778 int rc = 0;
1779
1780 // Sanity check stream_list
1781 if (streamList == NULL) {
1782 LOGE("NULL stream configuration");
1783 return BAD_VALUE;
1784 }
1785 if (streamList->streams == NULL) {
1786 LOGE("NULL stream list");
1787 return BAD_VALUE;
1788 }
1789
1790 if (streamList->num_streams < 1) {
1791 LOGE("Bad number of streams requested: %d",
1792 streamList->num_streams);
1793 return BAD_VALUE;
1794 }
1795
1796 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1797 LOGE("Maximum number of streams %d exceeded: %d",
1798 MAX_NUM_STREAMS, streamList->num_streams);
1799 return BAD_VALUE;
1800 }
1801
Jason Leec4cf5032017-05-24 18:31:41 -07001802 mOpMode = streamList->operation_mode;
1803 LOGD("mOpMode: %d", mOpMode);
1804
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001805 rc = validateUsageFlags(streamList);
1806 if (rc != NO_ERROR) {
1807 return rc;
1808 }
1809
Thierry Strudel3d639192016-09-09 11:52:26 -07001810    /* first invalidate all the streams in mStreamInfo;
1811     * if they appear again, they will be validated */
1812 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1813 it != mStreamInfo.end(); it++) {
1814 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1815 if (channel) {
1816 channel->stop();
1817 }
1818 (*it)->status = INVALID;
1819 }
1820
1821 if (mRawDumpChannel) {
1822 mRawDumpChannel->stop();
1823 delete mRawDumpChannel;
1824 mRawDumpChannel = NULL;
1825 }
1826
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001827 if (mHdrPlusRawSrcChannel) {
1828 mHdrPlusRawSrcChannel->stop();
1829 delete mHdrPlusRawSrcChannel;
1830 mHdrPlusRawSrcChannel = NULL;
1831 }
1832
Thierry Strudel3d639192016-09-09 11:52:26 -07001833 if (mSupportChannel)
1834 mSupportChannel->stop();
1835
1836 if (mAnalysisChannel) {
1837 mAnalysisChannel->stop();
1838 }
1839 if (mMetadataChannel) {
1840 /* If content of mStreamInfo is not 0, there is metadata stream */
1841 mMetadataChannel->stop();
1842 }
1843 if (mChannelHandle) {
1844 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07001845 mChannelHandle, /*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001846 LOGD("stopping channel %d", mChannelHandle);
1847 }
1848
1849 pthread_mutex_lock(&mMutex);
1850
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001851 mPictureChannel = NULL;
1852
Thierry Strudel3d639192016-09-09 11:52:26 -07001853 // Check state
1854 switch (mState) {
1855 case INITIALIZED:
1856 case CONFIGURED:
1857 case STARTED:
1858 /* valid state */
1859 break;
1860 default:
1861 LOGE("Invalid state %d", mState);
1862 pthread_mutex_unlock(&mMutex);
1863 return -ENODEV;
1864 }
1865
1866 /* Check whether we have video stream */
1867 m_bIs4KVideo = false;
1868 m_bIsVideo = false;
1869 m_bEisSupportedSize = false;
1870 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001871 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001872 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001873 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001874 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001875 uint32_t videoWidth = 0U;
1876 uint32_t videoHeight = 0U;
1877 size_t rawStreamCnt = 0;
1878 size_t stallStreamCnt = 0;
1879 size_t processedStreamCnt = 0;
1880 // Number of streams on ISP encoder path
1881 size_t numStreamsOnEncoder = 0;
1882 size_t numYuv888OnEncoder = 0;
1883 bool bYuv888OverrideJpeg = false;
1884 cam_dimension_t largeYuv888Size = {0, 0};
1885 cam_dimension_t maxViewfinderSize = {0, 0};
1886 bool bJpegExceeds4K = false;
1887 bool bJpegOnEncoder = false;
1888 bool bUseCommonFeatureMask = false;
1889 cam_feature_mask_t commonFeatureMask = 0;
1890 bool bSmallJpegSize = false;
1891 uint32_t width_ratio;
1892 uint32_t height_ratio;
1893 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1894 camera3_stream_t *inputStream = NULL;
1895 bool isJpeg = false;
1896 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001897 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001898 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001899
1900 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1901
1902 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001903 uint8_t eis_prop_set;
1904 uint32_t maxEisWidth = 0;
1905 uint32_t maxEisHeight = 0;
1906
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001907 // Initialize all instant AEC related variables
1908 mInstantAEC = false;
1909 mResetInstantAEC = false;
1910 mInstantAECSettledFrameNumber = 0;
1911 mAecSkipDisplayFrameBound = 0;
1912 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001913 mCurrFeatureState = 0;
1914 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001915
Binhao Lin09245482017-08-31 18:25:29 -07001916 m_bAVTimerEnabled = false;
1917
Thierry Strudel3d639192016-09-09 11:52:26 -07001918 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1919
1920 size_t count = IS_TYPE_MAX;
1921 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1922 for (size_t i = 0; i < count; i++) {
1923 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001924 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1925 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001926 break;
1927 }
1928 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001929
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001930 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001931 maxEisWidth = MAX_EIS_WIDTH;
1932 maxEisHeight = MAX_EIS_HEIGHT;
1933 }
1934
1935 /* EIS setprop control */
1936 char eis_prop[PROPERTY_VALUE_MAX];
1937 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001938 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001939 eis_prop_set = (uint8_t)atoi(eis_prop);
1940
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001941 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001942 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1943
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001944 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1945 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001946
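    // Illustrative property setting (value assumed for the example): with
    //   adb shell setprop persist.camera.eis.enable 0
    // m_bEisEnable stays false regardless of the configured stream sizes,
    // while the default of "1" leaves the decision to m_bEisSupported and the
    // operation-mode check above.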
Thierry Strudel3d639192016-09-09 11:52:26 -07001947 /* stream configurations */
1948 for (size_t i = 0; i < streamList->num_streams; i++) {
1949 camera3_stream_t *newStream = streamList->streams[i];
1950 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1951 "height = %d, rotation = %d, usage = 0x%x",
1952 i, newStream->stream_type, newStream->format,
1953 newStream->width, newStream->height, newStream->rotation,
1954 newStream->usage);
1955 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1956 newStream->stream_type == CAMERA3_STREAM_INPUT){
1957 isZsl = true;
1958 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001959 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1960 IS_USAGE_PREVIEW(newStream->usage)) {
1961 isPreview = true;
1962 }
1963
Thierry Strudel3d639192016-09-09 11:52:26 -07001964 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1965 inputStream = newStream;
1966 }
1967
Emilian Peev7650c122017-01-19 08:24:33 -08001968 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1969 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001970 isJpeg = true;
1971 jpegSize.width = newStream->width;
1972 jpegSize.height = newStream->height;
1973 if (newStream->width > VIDEO_4K_WIDTH ||
1974 newStream->height > VIDEO_4K_HEIGHT)
1975 bJpegExceeds4K = true;
1976 }
1977
1978 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1979 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1980 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001981 // In HAL3 we can have multiple different video streams.
1982 // The variables video width and height are used below as
1983 // dimensions of the biggest of them
1984 if (videoWidth < newStream->width ||
1985 videoHeight < newStream->height) {
1986 videoWidth = newStream->width;
1987 videoHeight = newStream->height;
1988 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001989 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1990 (VIDEO_4K_HEIGHT <= newStream->height)) {
1991 m_bIs4KVideo = true;
1992 }
1993 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
1994 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001995
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 }
1997 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1998 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
1999 switch (newStream->format) {
2000 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002001 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2002 depthPresent = true;
2003 break;
2004 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002005 stallStreamCnt++;
2006 if (isOnEncoder(maxViewfinderSize, newStream->width,
2007 newStream->height)) {
2008 numStreamsOnEncoder++;
2009 bJpegOnEncoder = true;
2010 }
2011 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2012 newStream->width);
2013 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2014                        newStream->height);
2015 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2016 "FATAL: max_downscale_factor cannot be zero and so assert");
2017 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2018 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2019 LOGH("Setting small jpeg size flag to true");
2020 bSmallJpegSize = true;
2021 }
2022 break;
2023 case HAL_PIXEL_FORMAT_RAW10:
2024 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2025 case HAL_PIXEL_FORMAT_RAW16:
2026 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002027 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2028 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2029 pdStatCount++;
2030 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002031 break;
2032 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2033 processedStreamCnt++;
2034 if (isOnEncoder(maxViewfinderSize, newStream->width,
2035 newStream->height)) {
2036 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2037 !IS_USAGE_ZSL(newStream->usage)) {
2038 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2039 }
2040 numStreamsOnEncoder++;
2041 }
2042 break;
2043 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2044 processedStreamCnt++;
2045 if (isOnEncoder(maxViewfinderSize, newStream->width,
2046 newStream->height)) {
2047 // If Yuv888 size is not greater than 4K, set feature mask
2048                    // to SUPERSET so that it supports concurrent requests on
2049 // YUV and JPEG.
2050 if (newStream->width <= VIDEO_4K_WIDTH &&
2051 newStream->height <= VIDEO_4K_HEIGHT) {
2052 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2053 }
2054 numStreamsOnEncoder++;
2055 numYuv888OnEncoder++;
2056 largeYuv888Size.width = newStream->width;
2057 largeYuv888Size.height = newStream->height;
2058 }
2059 break;
2060 default:
2061 processedStreamCnt++;
2062 if (isOnEncoder(maxViewfinderSize, newStream->width,
2063 newStream->height)) {
2064 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2065 numStreamsOnEncoder++;
2066 }
2067 break;
2068 }
2069
2070 }
2071 }
2072
2073 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2074 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2075 !m_bIsVideo) {
2076 m_bEisEnable = false;
2077 }
2078
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002079 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2080 pthread_mutex_unlock(&mMutex);
2081 return -EINVAL;
2082 }
2083
Thierry Strudel54dc9782017-02-15 12:12:10 -08002084 uint8_t forceEnableTnr = 0;
2085 char tnr_prop[PROPERTY_VALUE_MAX];
2086 memset(tnr_prop, 0, sizeof(tnr_prop));
2087 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2088 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2089
Thierry Strudel3d639192016-09-09 11:52:26 -07002090 /* Logic to enable/disable TNR based on specific config size/etc.*/
2091 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002092 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2093 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002094 else if (forceEnableTnr)
2095 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002096
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002097 char videoHdrProp[PROPERTY_VALUE_MAX];
2098 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2099 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2100 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2101
2102 if (hdr_mode_prop == 1 && m_bIsVideo &&
2103 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2104 m_bVideoHdrEnabled = true;
2105 else
2106 m_bVideoHdrEnabled = false;
2107
2108
Thierry Strudel3d639192016-09-09 11:52:26 -07002109 /* Check if num_streams is sane */
2110 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2111 rawStreamCnt > MAX_RAW_STREAMS ||
2112 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2113        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2114 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2115 pthread_mutex_unlock(&mMutex);
2116 return -EINVAL;
2117 }
2118 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002119 if (isZsl && m_bIs4KVideo) {
2120 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002121 pthread_mutex_unlock(&mMutex);
2122 return -EINVAL;
2123 }
2124 /* Check if stream sizes are sane */
2125 if (numStreamsOnEncoder > 2) {
2126 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2127 pthread_mutex_unlock(&mMutex);
2128 return -EINVAL;
2129 } else if (1 < numStreamsOnEncoder){
2130 bUseCommonFeatureMask = true;
2131 LOGH("Multiple streams above max viewfinder size, common mask needed");
2132 }
2133
2134 /* Check if BLOB size is greater than 4k in 4k recording case */
2135 if (m_bIs4KVideo && bJpegExceeds4K) {
2136 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2137 pthread_mutex_unlock(&mMutex);
2138 return -EINVAL;
2139 }
2140
Emilian Peev7650c122017-01-19 08:24:33 -08002141 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2142 depthPresent) {
2143 LOGE("HAL doesn't support depth streams in HFR mode!");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Thierry Strudel3d639192016-09-09 11:52:26 -07002148 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2149 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2150 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2151 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2152 // configurations:
2153 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2154 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2155 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2156 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2157 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2158 __func__);
2159 pthread_mutex_unlock(&mMutex);
2160 return -EINVAL;
2161 }
2162
2163 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2164 // the YUV stream's size is greater or equal to the JPEG size, set common
2165 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2166 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2167 jpegSize.width, jpegSize.height) &&
2168 largeYuv888Size.width > jpegSize.width &&
2169 largeYuv888Size.height > jpegSize.height) {
2170 bYuv888OverrideJpeg = true;
2171 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2172 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2173 }
2174
2175 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2176 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2177 commonFeatureMask);
2178 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2179 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2180
2181 rc = validateStreamDimensions(streamList);
2182 if (rc == NO_ERROR) {
2183 rc = validateStreamRotations(streamList);
2184 }
2185 if (rc != NO_ERROR) {
2186 LOGE("Invalid stream configuration requested!");
2187 pthread_mutex_unlock(&mMutex);
2188 return rc;
2189 }
2190
Emilian Peev0f3c3162017-03-15 12:57:46 +00002191 if (1 < pdStatCount) {
2192 LOGE("HAL doesn't support multiple PD streams");
2193 pthread_mutex_unlock(&mMutex);
2194 return -EINVAL;
2195 }
2196
2197 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2198 (1 == pdStatCount)) {
2199 LOGE("HAL doesn't support PD streams in HFR mode!");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
Thierry Strudel3d639192016-09-09 11:52:26 -07002204 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2205 for (size_t i = 0; i < streamList->num_streams; i++) {
2206 camera3_stream_t *newStream = streamList->streams[i];
2207 LOGH("newStream type = %d, stream format = %d "
2208 "stream size : %d x %d, stream rotation = %d",
2209 newStream->stream_type, newStream->format,
2210 newStream->width, newStream->height, newStream->rotation);
2211        //if the stream is already in mStreamInfo, validate it
2212 bool stream_exists = false;
2213 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2214 it != mStreamInfo.end(); it++) {
2215 if ((*it)->stream == newStream) {
2216 QCamera3ProcessingChannel *channel =
2217 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2218 stream_exists = true;
2219 if (channel)
2220 delete channel;
2221 (*it)->status = VALID;
2222 (*it)->stream->priv = NULL;
2223 (*it)->channel = NULL;
2224 }
2225 }
2226 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2227 //new stream
2228 stream_info_t* stream_info;
2229 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2230 if (!stream_info) {
2231 LOGE("Could not allocate stream info");
2232 rc = -ENOMEM;
2233 pthread_mutex_unlock(&mMutex);
2234 return rc;
2235 }
2236 stream_info->stream = newStream;
2237 stream_info->status = VALID;
2238 stream_info->channel = NULL;
2239 mStreamInfo.push_back(stream_info);
2240 }
2241 /* Covers Opaque ZSL and API1 F/W ZSL */
2242 if (IS_USAGE_ZSL(newStream->usage)
2243 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2244 if (zslStream != NULL) {
2245 LOGE("Multiple input/reprocess streams requested!");
2246 pthread_mutex_unlock(&mMutex);
2247 return BAD_VALUE;
2248 }
2249 zslStream = newStream;
2250 }
2251 /* Covers YUV reprocess */
2252 if (inputStream != NULL) {
2253 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2254 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2255 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2256 && inputStream->width == newStream->width
2257 && inputStream->height == newStream->height) {
2258 if (zslStream != NULL) {
2259                        /* This scenario indicates that multiple YUV streams with the
2260                         * same size as the input stream have been requested. Since the
2261                         * zsl stream handle is solely used to override the size of streams
2262                         * that share h/w streams, we just make a guess here as to which
2263                         * stream is the ZSL stream; this will be refactored once we have
2264                         * generic logic for streams sharing encoder output.
2265 */
2266 LOGH("Warning, Multiple ip/reprocess streams requested!");
2267 }
2268 zslStream = newStream;
2269 }
2270 }
2271 }
2272
2273 /* If a zsl stream is set, we know that we have configured at least one input or
2274 bidirectional stream */
2275 if (NULL != zslStream) {
2276 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2277 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2278 mInputStreamInfo.format = zslStream->format;
2279 mInputStreamInfo.usage = zslStream->usage;
2280 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2281 mInputStreamInfo.dim.width,
2282 mInputStreamInfo.dim.height,
2283 mInputStreamInfo.format, mInputStreamInfo.usage);
2284 }
2285
2286 cleanAndSortStreamInfo();
2287 if (mMetadataChannel) {
2288 delete mMetadataChannel;
2289 mMetadataChannel = NULL;
2290 }
2291 if (mSupportChannel) {
2292 delete mSupportChannel;
2293 mSupportChannel = NULL;
2294 }
2295
2296 if (mAnalysisChannel) {
2297 delete mAnalysisChannel;
2298 mAnalysisChannel = NULL;
2299 }
2300
2301 if (mDummyBatchChannel) {
2302 delete mDummyBatchChannel;
2303 mDummyBatchChannel = NULL;
2304 }
2305
Emilian Peev7650c122017-01-19 08:24:33 -08002306 if (mDepthChannel) {
2307 mDepthChannel = NULL;
2308 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002309 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002310
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002311 mShutterDispatcher.clear();
2312 mOutputBufferDispatcher.clear();
2313
Thierry Strudel2896d122017-02-23 19:18:03 -08002314 char is_type_value[PROPERTY_VALUE_MAX];
2315 property_get("persist.camera.is_type", is_type_value, "4");
2316 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2317
Binhao Line406f062017-05-03 14:39:44 -07002318 char property_value[PROPERTY_VALUE_MAX];
2319 property_get("persist.camera.gzoom.at", property_value, "0");
2320 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002321 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2322 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2323 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2324 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
Binhao Line406f062017-05-03 14:39:44 -07002325
2326 property_get("persist.camera.gzoom.4k", property_value, "0");
2327 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2328
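    // Illustrative property settings (values assumed for the example):
    // persist.camera.gzoom.at is a bitmask with bit 0 = video and bit 1 =
    // preview, so "setprop persist.camera.gzoom.at 3" requests Google zoom on
    // both paths (back camera only, per the checks above), and
    // persist.camera.gzoom.4k > 0 additionally allows it for 4K video.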
Thierry Strudel3d639192016-09-09 11:52:26 -07002329 //Create metadata channel and initialize it
2330 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2331 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2332 gCamCapability[mCameraId]->color_arrangement);
2333 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2334 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002335 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002336 if (mMetadataChannel == NULL) {
2337 LOGE("failed to allocate metadata channel");
2338 rc = -ENOMEM;
2339 pthread_mutex_unlock(&mMutex);
2340 return rc;
2341 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002342 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002343 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2344 if (rc < 0) {
2345 LOGE("metadata channel initialization failed");
2346 delete mMetadataChannel;
2347 mMetadataChannel = NULL;
2348 pthread_mutex_unlock(&mMutex);
2349 return rc;
2350 }
2351
Thierry Strudel2896d122017-02-23 19:18:03 -08002352 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002353 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002354 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002355 // Keep track of preview/video streams indices.
2356 // There could be more than one preview streams, but only one video stream.
2357 int32_t video_stream_idx = -1;
2358 int32_t preview_stream_idx[streamList->num_streams];
2359 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002360 bool previewTnr[streamList->num_streams];
2361 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2362 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2363 // Loop through once to determine preview TNR conditions before creating channels.
2364 for (size_t i = 0; i < streamList->num_streams; i++) {
2365 camera3_stream_t *newStream = streamList->streams[i];
2366 uint32_t stream_usage = newStream->usage;
2367 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2368 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2369 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2370 video_stream_idx = (int32_t)i;
2371 else
2372 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2373 }
2374 }
2375 // By default, preview stream TNR is disabled.
2376        // Enable TNR for a preview stream if all of the conditions below are satisfied:
2377 // 1. preview resolution == video resolution.
2378 // 2. video stream TNR is enabled.
2379 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
2380 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2381 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2382 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2383 if (m_bTnrEnabled && m_bTnrVideo &&
2384 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2385 video_stream->width == preview_stream->width &&
2386 video_stream->height == preview_stream->height) {
2387 previewTnr[preview_stream_idx[i]] = true;
2388 }
2389 }
2390
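    // Example of the decision above (stream sizes are hypothetical): with
    // m_bTnrEnabled && m_bTnrVideo and EIS 2.0 (or a front camera), a
    // 1920x1080 preview paired with a 1920x1080 video stream gets
    // previewTnr[] = true, while a 1440x1080 preview next to the same video
    // stream does not, because the resolutions differ.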
Thierry Strudel3d639192016-09-09 11:52:26 -07002391 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2392 /* Allocate channel objects for the requested streams */
2393 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002394
Thierry Strudel3d639192016-09-09 11:52:26 -07002395 camera3_stream_t *newStream = streamList->streams[i];
2396 uint32_t stream_usage = newStream->usage;
2397 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2398 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2399 struct camera_info *p_info = NULL;
2400 pthread_mutex_lock(&gCamLock);
2401 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2402 pthread_mutex_unlock(&gCamLock);
2403 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2404 || IS_USAGE_ZSL(newStream->usage)) &&
2405 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002406 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002407 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002408 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2409 if (bUseCommonFeatureMask)
2410 zsl_ppmask = commonFeatureMask;
2411 else
2412 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002413 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 if (numStreamsOnEncoder > 0)
2415 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2416 else
2417 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002418 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002419 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002420 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002421 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002422 LOGH("Input stream configured, reprocess config");
2423 } else {
2424 //for non zsl streams find out the format
2425 switch (newStream->format) {
2426 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2427 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002428 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002429 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2430 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2431 /* add additional features to pp feature mask */
2432 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2433 mStreamConfigInfo.num_streams);
2434
2435 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2436 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2437 CAM_STREAM_TYPE_VIDEO;
2438 if (m_bTnrEnabled && m_bTnrVideo) {
2439 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2440 CAM_QCOM_FEATURE_CPP_TNR;
2441 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2442 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2443 ~CAM_QCOM_FEATURE_CDS;
2444 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002445 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2446 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2447 CAM_QTI_FEATURE_PPEISCORE;
2448 }
Binhao Line406f062017-05-03 14:39:44 -07002449 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2451 CAM_QCOM_FEATURE_GOOG_ZOOM;
2452 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002453 } else {
2454 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2455 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002456 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002457 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2458 CAM_QCOM_FEATURE_CPP_TNR;
2459 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2460 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2461 ~CAM_QCOM_FEATURE_CDS;
2462 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002463 if(!m_bSwTnrPreview) {
2464 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2465 ~CAM_QTI_FEATURE_SW_TNR;
2466 }
Binhao Line406f062017-05-03 14:39:44 -07002467 if (is_goog_zoom_preview_enabled) {
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2469 CAM_QCOM_FEATURE_GOOG_ZOOM;
2470 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002471 padding_info.width_padding = mSurfaceStridePadding;
2472 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002473 previewSize.width = (int32_t)newStream->width;
2474 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002475 }
2476 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2477 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2478 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2479 newStream->height;
2480 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2481 newStream->width;
2482 }
2483 }
2484 break;
2485 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002486 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002487 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2488 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2489 if (bUseCommonFeatureMask)
2490 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2491 commonFeatureMask;
2492 else
2493 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2494 CAM_QCOM_FEATURE_NONE;
2495 } else {
2496 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2497 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2498 }
2499 break;
2500 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002501 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002502 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2503 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2504 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2505 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2506 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002507 /* Remove rotation if it is not supported
2508 for 4K LiveVideo snapshot case (online processing) */
2509 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2510 CAM_QCOM_FEATURE_ROTATION)) {
2511 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2512 &= ~CAM_QCOM_FEATURE_ROTATION;
2513 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002514 } else {
2515 if (bUseCommonFeatureMask &&
2516 isOnEncoder(maxViewfinderSize, newStream->width,
2517 newStream->height)) {
2518 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2519 } else {
2520 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2521 }
2522 }
2523 if (isZsl) {
2524 if (zslStream) {
2525 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2526 (int32_t)zslStream->width;
2527 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2528 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002529 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2530 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002531 } else {
2532 LOGE("Error, No ZSL stream identified");
2533 pthread_mutex_unlock(&mMutex);
2534 return -EINVAL;
2535 }
2536 } else if (m_bIs4KVideo) {
2537 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2538 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2539 } else if (bYuv888OverrideJpeg) {
2540 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2541 (int32_t)largeYuv888Size.width;
2542 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2543 (int32_t)largeYuv888Size.height;
2544 }
2545 break;
2546 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2547 case HAL_PIXEL_FORMAT_RAW16:
2548 case HAL_PIXEL_FORMAT_RAW10:
2549 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2550 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2551 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002552 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2553 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2554 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2555 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2556 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2557 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2558 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2559 gCamCapability[mCameraId]->dt[mPDIndex];
2560 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2561 gCamCapability[mCameraId]->vc[mPDIndex];
2562 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002563 break;
2564 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002565 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002566 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2567 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2568 break;
2569 }
2570 }
2571
2572 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2573 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2574 gCamCapability[mCameraId]->color_arrangement);
2575
2576 if (newStream->priv == NULL) {
2577 //New stream, construct channel
2578 switch (newStream->stream_type) {
2579 case CAMERA3_STREAM_INPUT:
2580 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2581 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;//WR for inplace algo's
2582 break;
2583 case CAMERA3_STREAM_BIDIRECTIONAL:
2584 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2585 GRALLOC_USAGE_HW_CAMERA_WRITE;
2586 break;
2587 case CAMERA3_STREAM_OUTPUT:
2588 /* For video encoding stream, set read/write rarely
2589                     * flags so that the buffers may be allocated un-cached */
2590 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2591 newStream->usage |=
2592 (GRALLOC_USAGE_SW_READ_RARELY |
2593 GRALLOC_USAGE_SW_WRITE_RARELY |
2594 GRALLOC_USAGE_HW_CAMERA_WRITE);
2595 else if (IS_USAGE_ZSL(newStream->usage))
2596 {
2597 LOGD("ZSL usage flag skipping");
2598 }
2599 else if (newStream == zslStream
2600 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2601 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2602 } else
2603 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2604 break;
2605 default:
2606 LOGE("Invalid stream_type %d", newStream->stream_type);
2607 break;
2608 }
2609
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002610 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002611 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2612 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2613 QCamera3ProcessingChannel *channel = NULL;
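                // Pick a channel class per stream format: IMPLEMENTATION_DEFINED maps to a
                // regular (preview/video) channel, YCbCr_420_888 to a YUV channel, RAW
                // formats to the RAW channel, and BLOB to either the depth or picture channel.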
2614 switch (newStream->format) {
2615 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2616 if ((newStream->usage &
2617 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2618 (streamList->operation_mode ==
2619 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2620 ) {
2621 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2622 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002623 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002624 this,
2625 newStream,
2626 (cam_stream_type_t)
2627 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2628 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2629 mMetadataChannel,
2630 0); //heap buffers are not required for HFR video channel
2631 if (channel == NULL) {
2632 LOGE("allocation of channel failed");
2633 pthread_mutex_unlock(&mMutex);
2634 return -ENOMEM;
2635 }
2636 //channel->getNumBuffers() will return 0 here so use
2637                        //MAX_INFLIGHT_HFR_REQUESTS
2638 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2639 newStream->priv = channel;
2640 LOGI("num video buffers in HFR mode: %d",
2641 MAX_INFLIGHT_HFR_REQUESTS);
2642 } else {
2643 /* Copy stream contents in HFR preview only case to create
2644 * dummy batch channel so that sensor streaming is in
2645 * HFR mode */
2646 if (!m_bIsVideo && (streamList->operation_mode ==
2647 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2648 mDummyBatchStream = *newStream;
2649 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002650 int bufferCount = MAX_INFLIGHT_REQUESTS;
2651 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2652 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002653 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2654 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2655 bufferCount = m_bIs4KVideo ?
2656 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2657 }
2658
Thierry Strudel2896d122017-02-23 19:18:03 -08002659 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002660 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2661 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002662 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002663 this,
2664 newStream,
2665 (cam_stream_type_t)
2666 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2667 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2668 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002669 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002670 if (channel == NULL) {
2671 LOGE("allocation of channel failed");
2672 pthread_mutex_unlock(&mMutex);
2673 return -ENOMEM;
2674 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002675 /* disable UBWC for preview, though supported,
2676 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002677 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002678 (previewSize.width == (int32_t)videoWidth)&&
2679 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002680 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002681 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002682 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002683 /* When goog_zoom is linked to the preview or video stream,
2684 * disable ubwc to the linked stream */
2685 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2686 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2687 channel->setUBWCEnabled(false);
2688 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002689 newStream->max_buffers = channel->getNumBuffers();
2690 newStream->priv = channel;
2691 }
2692 break;
2693 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2694 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2695 mChannelHandle,
2696 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002697 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002698 this,
2699 newStream,
2700 (cam_stream_type_t)
2701 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2702 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2703 mMetadataChannel);
2704 if (channel == NULL) {
2705 LOGE("allocation of YUV channel failed");
2706 pthread_mutex_unlock(&mMutex);
2707 return -ENOMEM;
2708 }
2709 newStream->max_buffers = channel->getNumBuffers();
2710 newStream->priv = channel;
2711 break;
2712 }
2713 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2714 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002715 case HAL_PIXEL_FORMAT_RAW10: {
2716 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2717 (HAL_DATASPACE_DEPTH != newStream->data_space))
2718 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002719 mRawChannel = new QCamera3RawChannel(
2720 mCameraHandle->camera_handle, mChannelHandle,
2721 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002722 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002723 this, newStream,
2724 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002725 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002726 if (mRawChannel == NULL) {
2727 LOGE("allocation of raw channel failed");
2728 pthread_mutex_unlock(&mMutex);
2729 return -ENOMEM;
2730 }
2731 newStream->max_buffers = mRawChannel->getNumBuffers();
2732 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2733 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002734 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002735 case HAL_PIXEL_FORMAT_BLOB:
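                    // A BLOB stream with the DEPTH dataspace is backed by the depth channel;
                    // any other BLOB stream is a JPEG snapshot served by the picture channel.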
Emilian Peev7650c122017-01-19 08:24:33 -08002736 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2737 mDepthChannel = new QCamera3DepthChannel(
2738 mCameraHandle->camera_handle, mChannelHandle,
2739 mCameraHandle->ops, NULL, NULL, &padding_info,
2740 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2741 mMetadataChannel);
2742 if (NULL == mDepthChannel) {
2743 LOGE("Allocation of depth channel failed");
2744 pthread_mutex_unlock(&mMutex);
2745 return NO_MEMORY;
2746 }
2747 newStream->priv = mDepthChannel;
2748 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2749 } else {
2750 // Max live snapshot inflight buffer is 1. This is to mitigate
2751 // frame drop issues for video snapshot. The more buffers being
2752 // allocated, the more frame drops there are.
2753 mPictureChannel = new QCamera3PicChannel(
2754 mCameraHandle->camera_handle, mChannelHandle,
2755 mCameraHandle->ops, captureResultCb,
2756 setBufferErrorStatus, &padding_info, this, newStream,
2757 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2758 m_bIs4KVideo, isZsl, mMetadataChannel,
2759 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2760 if (mPictureChannel == NULL) {
2761 LOGE("allocation of channel failed");
2762 pthread_mutex_unlock(&mMutex);
2763 return -ENOMEM;
2764 }
2765 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2766 newStream->max_buffers = mPictureChannel->getNumBuffers();
2767 mPictureChannel->overrideYuvSize(
2768 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2769 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002770 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002771 break;
2772
2773 default:
2774 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002775 pthread_mutex_unlock(&mMutex);
2776 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002777 }
2778 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2779 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2780 } else {
2781 LOGE("Error, Unknown stream type");
2782 pthread_mutex_unlock(&mMutex);
2783 return -EINVAL;
2784 }
2785
2786 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002787 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002788 // Here we only care whether it's EIS3 or not
2789 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2790 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2791 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2792 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002793 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002794 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002795 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002796 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2797 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2798 }
2799 }
2800
2801 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2802 it != mStreamInfo.end(); it++) {
2803 if ((*it)->stream == newStream) {
2804 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2805 break;
2806 }
2807 }
2808 } else {
2809 // Channel already exists for this stream
2810 // Do nothing for now
2811 }
2812 padding_info = gCamCapability[mCameraId]->padding_info;
2813
Emilian Peev7650c122017-01-19 08:24:33 -08002814 /* Do not add entries for input&depth stream in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002815 * since there is no real stream associated with it
2816 */
Emilian Peev7650c122017-01-19 08:24:33 -08002817 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002818 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2819 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002820 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002821 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002822 }
2823
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002824 // Let buffer dispatcher know the configured streams.
2825 mOutputBufferDispatcher.configureStreams(streamList);
2826
Thierry Strudel2896d122017-02-23 19:18:03 -08002827 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2828 onlyRaw = false;
2829 }
2830
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002831 // Create analysis stream all the time, even when h/w support is not available
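    // For mono sensors the analysis format is Y-only, so PAAF support below is evaluated
    // against a Y color-filter arrangement instead of the sensor's own arrangement.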
Thierry Strudel2896d122017-02-23 19:18:03 -08002832 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002833 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002834 cam_analysis_info_t analysisInfo;
2835 int32_t ret = NO_ERROR;
2836 ret = mCommon.getAnalysisInfo(
2837 FALSE,
2838 analysisFeatureMask,
2839 &analysisInfo);
2840 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002841 cam_color_filter_arrangement_t analysis_color_arrangement =
2842 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2843 CAM_FILTER_ARRANGEMENT_Y :
2844 gCamCapability[mCameraId]->color_arrangement);
2845 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2846 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002847 cam_dimension_t analysisDim;
2848 analysisDim = mCommon.getMatchingDimension(previewSize,
2849 analysisInfo.analysis_recommended_res);
2850
2851 mAnalysisChannel = new QCamera3SupportChannel(
2852 mCameraHandle->camera_handle,
2853 mChannelHandle,
2854 mCameraHandle->ops,
2855 &analysisInfo.analysis_padding_info,
2856 analysisFeatureMask,
2857 CAM_STREAM_TYPE_ANALYSIS,
2858 &analysisDim,
2859 (analysisInfo.analysis_format
2860 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2861 : CAM_FORMAT_YUV_420_NV21),
2862 analysisInfo.hw_analysis_supported,
2863 gCamCapability[mCameraId]->color_arrangement,
2864 this,
2865 0); // force buffer count to 0
2866 } else {
2867 LOGW("getAnalysisInfo failed, ret = %d", ret);
2868 }
2869 if (!mAnalysisChannel) {
2870 LOGW("Analysis channel cannot be created");
2871 }
2872 }
2873
Thierry Strudel3d639192016-09-09 11:52:26 -07002874 //RAW DUMP channel
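    // Created only when raw dump is enabled and the framework did not request a RAW stream itself.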
2875 if (mEnableRawDump && isRawStreamRequested == false){
2876 cam_dimension_t rawDumpSize;
2877 rawDumpSize = getMaxRawSize(mCameraId);
2878 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2879 setPAAFSupport(rawDumpFeatureMask,
2880 CAM_STREAM_TYPE_RAW,
2881 gCamCapability[mCameraId]->color_arrangement);
2882 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2883 mChannelHandle,
2884 mCameraHandle->ops,
2885 rawDumpSize,
2886 &padding_info,
2887 this, rawDumpFeatureMask);
2888 if (!mRawDumpChannel) {
2889 LOGE("Raw Dump channel cannot be created");
2890 pthread_mutex_unlock(&mMutex);
2891 return -ENOMEM;
2892 }
2893 }
2894
Thierry Strudel3d639192016-09-09 11:52:26 -07002895 if (mAnalysisChannel) {
2896 cam_analysis_info_t analysisInfo;
2897 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2898 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2899 CAM_STREAM_TYPE_ANALYSIS;
2900 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2901 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002902 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2904 &analysisInfo);
2905 if (rc != NO_ERROR) {
2906 LOGE("getAnalysisInfo failed, ret = %d", rc);
2907 pthread_mutex_unlock(&mMutex);
2908 return rc;
2909 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002910 cam_color_filter_arrangement_t analysis_color_arrangement =
2911 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2912 CAM_FILTER_ARRANGEMENT_Y :
2913 gCamCapability[mCameraId]->color_arrangement);
2914 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2915 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2916 analysis_color_arrangement);
2917
Thierry Strudel3d639192016-09-09 11:52:26 -07002918 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002919 mCommon.getMatchingDimension(previewSize,
2920 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002921 mStreamConfigInfo.num_streams++;
2922 }
2923
Thierry Strudel2896d122017-02-23 19:18:03 -08002924 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002925 cam_analysis_info_t supportInfo;
2926 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2927 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2928 setPAAFSupport(callbackFeatureMask,
2929 CAM_STREAM_TYPE_CALLBACK,
2930 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002931 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002932 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002933 if (ret != NO_ERROR) {
2934 /* Ignore the error for Mono camera
2935 * because the PAAF bit mask is only set
2936 * for CAM_STREAM_TYPE_ANALYSIS stream type
2937 */
2938 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2939 LOGW("getAnalysisInfo failed, ret = %d", ret);
2940 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002941 }
2942 mSupportChannel = new QCamera3SupportChannel(
2943 mCameraHandle->camera_handle,
2944 mChannelHandle,
2945 mCameraHandle->ops,
2946 &gCamCapability[mCameraId]->padding_info,
2947 callbackFeatureMask,
2948 CAM_STREAM_TYPE_CALLBACK,
2949 &QCamera3SupportChannel::kDim,
2950 CAM_FORMAT_YUV_420_NV21,
2951 supportInfo.hw_analysis_supported,
2952 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002953 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002954 if (!mSupportChannel) {
2955 LOGE("dummy channel cannot be created");
2956 pthread_mutex_unlock(&mMutex);
2957 return -ENOMEM;
2958 }
2959 }
2960
2961 if (mSupportChannel) {
2962 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2963 QCamera3SupportChannel::kDim;
2964 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2965 CAM_STREAM_TYPE_CALLBACK;
2966 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2967 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2968 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2969 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2970 gCamCapability[mCameraId]->color_arrangement);
2971 mStreamConfigInfo.num_streams++;
2972 }
2973
2974 if (mRawDumpChannel) {
2975 cam_dimension_t rawSize;
2976 rawSize = getMaxRawSize(mCameraId);
2977 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2978 rawSize;
2979 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2980 CAM_STREAM_TYPE_RAW;
2981 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2982 CAM_QCOM_FEATURE_NONE;
2983 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2984 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2985 gCamCapability[mCameraId]->color_arrangement);
2986 mStreamConfigInfo.num_streams++;
2987 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002988
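    // Reserve a RAW stream slot for the HDR+ RAW source channel, mirroring the raw dump entry above.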
2989 if (mHdrPlusRawSrcChannel) {
2990 cam_dimension_t rawSize;
2991 rawSize = getMaxRawSize(mCameraId);
2992 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
2993 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2994 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2995 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2996 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2997 gCamCapability[mCameraId]->color_arrangement);
2998 mStreamConfigInfo.num_streams++;
2999 }
3000
Thierry Strudel3d639192016-09-09 11:52:26 -07003001 /* In HFR mode, if video stream is not added, create a dummy channel so that
3002     * ISP can set up batch mode even for the preview-only case. This channel is
3003 * never 'start'ed (no stream-on), it is only 'initialized' */
3004 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3005 !m_bIsVideo) {
3006 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3007 setPAAFSupport(dummyFeatureMask,
3008 CAM_STREAM_TYPE_VIDEO,
3009 gCamCapability[mCameraId]->color_arrangement);
3010 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3011 mChannelHandle,
3012 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003013 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003014 this,
3015 &mDummyBatchStream,
3016 CAM_STREAM_TYPE_VIDEO,
3017 dummyFeatureMask,
3018 mMetadataChannel);
3019 if (NULL == mDummyBatchChannel) {
3020 LOGE("creation of mDummyBatchChannel failed."
3021                                " Preview will use non-hfr sensor mode");
3022 }
3023 }
3024 if (mDummyBatchChannel) {
3025 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3026 mDummyBatchStream.width;
3027 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3028 mDummyBatchStream.height;
3029 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3030 CAM_STREAM_TYPE_VIDEO;
3031 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3032 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3033 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3034 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3035 gCamCapability[mCameraId]->color_arrangement);
3036 mStreamConfigInfo.num_streams++;
3037 }
3038
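    // In-flight buffer budget reported to the backend: 4K video reports 0, EIS3 video is
    // capped at MAX_VIDEO_BUFFERS, and all other configurations use MAX_INFLIGHT_REQUESTS.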
3039 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3040 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003041 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003042 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003043
3044 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3045 for (pendingRequestIterator i = mPendingRequestsList.begin();
3046 i != mPendingRequestsList.end();) {
3047 i = erasePendingRequest(i);
3048 }
3049 mPendingFrameDropList.clear();
3050 // Initialize/Reset the pending buffers list
3051 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3052 req.mPendingBufferList.clear();
3053 }
3054 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003055 mExpectedInflightDuration = 0;
3056 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003057
Thierry Strudel3d639192016-09-09 11:52:26 -07003058 mCurJpegMeta.clear();
3059    //Get min frame duration for this stream configuration
3060 deriveMinFrameDuration();
3061
Chien-Yu Chenee335912017-02-09 17:53:20 -08003062 mFirstPreviewIntentSeen = false;
3063
3064    // Disable HDR+ if it's enabled
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003065 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003066 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
3067 finishHdrPlusClientOpeningLocked(l);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07003068 disableHdrPlusModeLocked();
3069 }
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070
Thierry Strudel3d639192016-09-09 11:52:26 -07003071 // Update state
3072 mState = CONFIGURED;
3073
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003074 mFirstMetadataCallback = true;
3075
Thierry Strudel3d639192016-09-09 11:52:26 -07003076 pthread_mutex_unlock(&mMutex);
3077
3078 return rc;
3079}
3080
3081/*===========================================================================
3082 * FUNCTION : validateCaptureRequest
3083 *
3084 * DESCRIPTION: validate a capture request from camera service
3085 *
3086 * PARAMETERS :
3087 * @request : request from framework to process
3088 *
3089 * RETURN :
3090 *
3091 *==========================================================================*/
3092int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003093 camera3_capture_request_t *request,
3094 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003095{
3096 ssize_t idx = 0;
3097 const camera3_stream_buffer_t *b;
3098 CameraMetadata meta;
3099
3100 /* Sanity check the request */
3101 if (request == NULL) {
3102 LOGE("NULL capture request");
3103 return BAD_VALUE;
3104 }
3105
3106 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3107 /*settings cannot be null for the first request*/
3108 return BAD_VALUE;
3109 }
3110
3111 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003112 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3113 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003114 LOGE("Request %d: No output buffers provided!",
3115                    frameNumber);
3116 return BAD_VALUE;
3117 }
3118 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3119            LOGE("Number of buffers %d equals or is greater than maximum number of streams %d!",
3120 request->num_output_buffers, MAX_NUM_STREAMS);
3121 return BAD_VALUE;
3122 }
3123 if (request->input_buffer != NULL) {
3124 b = request->input_buffer;
3125 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3126 LOGE("Request %d: Buffer %ld: Status not OK!",
3127 frameNumber, (long)idx);
3128 return BAD_VALUE;
3129 }
3130 if (b->release_fence != -1) {
3131 LOGE("Request %d: Buffer %ld: Has a release fence!",
3132 frameNumber, (long)idx);
3133 return BAD_VALUE;
3134 }
3135 if (b->buffer == NULL) {
3136 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3137 frameNumber, (long)idx);
3138 return BAD_VALUE;
3139 }
3140 }
3141
3142 // Validate all buffers
3143 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003144 if (b == NULL) {
3145 return BAD_VALUE;
3146 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003147 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003148 QCamera3ProcessingChannel *channel =
3149 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3150 if (channel == NULL) {
3151 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3152 frameNumber, (long)idx);
3153 return BAD_VALUE;
3154 }
3155 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3156 LOGE("Request %d: Buffer %ld: Status not OK!",
3157 frameNumber, (long)idx);
3158 return BAD_VALUE;
3159 }
3160 if (b->release_fence != -1) {
3161 LOGE("Request %d: Buffer %ld: Has a release fence!",
3162 frameNumber, (long)idx);
3163 return BAD_VALUE;
3164 }
3165 if (b->buffer == NULL) {
3166 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3167 frameNumber, (long)idx);
3168 return BAD_VALUE;
3169 }
3170 if (*(b->buffer) == NULL) {
3171 LOGE("Request %d: Buffer %ld: NULL private handle!",
3172 frameNumber, (long)idx);
3173 return BAD_VALUE;
3174 }
3175 idx++;
3176 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003177 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003178 return NO_ERROR;
3179}
3180
3181/*===========================================================================
3182 * FUNCTION : deriveMinFrameDuration
3183 *
3184 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3185 * on currently configured streams.
3186 *
3187 * PARAMETERS : NONE
3188 *
3189 * RETURN : NONE
3190 *
3191 *==========================================================================*/
3192void QCamera3HardwareInterface::deriveMinFrameDuration()
3193{
3194 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003195 bool hasRaw = false;
3196
3197 mMinRawFrameDuration = 0;
3198 mMinJpegFrameDuration = 0;
3199 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003200
3201 maxJpegDim = 0;
3202 maxProcessedDim = 0;
3203 maxRawDim = 0;
3204
3205 // Figure out maximum jpeg, processed, and raw dimensions
3206 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3207 it != mStreamInfo.end(); it++) {
3208
3209 // Input stream doesn't have valid stream_type
3210 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3211 continue;
3212
3213 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3214 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3215 if (dimension > maxJpegDim)
3216 maxJpegDim = dimension;
3217 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3218 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003220 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003221 if (dimension > maxRawDim)
3222 maxRawDim = dimension;
3223 } else {
3224 if (dimension > maxProcessedDim)
3225 maxProcessedDim = dimension;
3226 }
3227 }
3228
3229 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3230 MAX_SIZES_CNT);
3231
3232 //Assume all jpeg dimensions are in processed dimensions.
3233 if (maxJpegDim > maxProcessedDim)
3234 maxProcessedDim = maxJpegDim;
3235 //Find the smallest raw dimension that is greater or equal to jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003236 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003237 maxRawDim = INT32_MAX;
3238
3239 for (size_t i = 0; i < count; i++) {
3240 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3241 gCamCapability[mCameraId]->raw_dim[i].height;
3242 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3243 maxRawDim = dimension;
3244 }
3245 }
3246
3247 //Find minimum durations for processed, jpeg, and raw
3248 for (size_t i = 0; i < count; i++) {
3249 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3250 gCamCapability[mCameraId]->raw_dim[i].height) {
3251 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3252 break;
3253 }
3254 }
3255 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3256 for (size_t i = 0; i < count; i++) {
3257 if (maxProcessedDim ==
3258 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3260 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3261 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 break;
3263 }
3264 }
3265}
3266
3267/*===========================================================================
3268 * FUNCTION : getMinFrameDuration
3269 *
3270 * DESCRIPTION: get minimum frame duration based on the derived per-type minimum frame
3271 *              durations and the current request configuration.
3272 *
3273 * PARAMETERS : @request: request sent by the framework
3274 *
3275 * RETURN : minimum frame duration for a particular request
3276 *
3277 *==========================================================================*/
3278int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3279{
3280 bool hasJpegStream = false;
3281 bool hasRawStream = false;
3282 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3283 const camera3_stream_t *stream = request->output_buffers[i].stream;
3284 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3285 hasJpegStream = true;
3286 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3287 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW16)
3289 hasRawStream = true;
3290 }
3291
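    // The per-request floor is the slowest (largest) of the per-type minimum durations
    // present in this request; the JPEG minimum only applies when a BLOB stream is requested.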
3292 if (!hasJpegStream)
3293 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3294 else
3295 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3296}
3297
3298/*===========================================================================
3299 * FUNCTION : handleBuffersDuringFlushLock
3300 *
3301 * DESCRIPTION: Account for buffers returned from back-end during flush
3302 * This function is executed while mMutex is held by the caller.
3303 *
3304 * PARAMETERS :
3305 * @buffer: image buffer for the callback
3306 *
3307 * RETURN :
3308 *==========================================================================*/
3309void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3310{
3311 bool buffer_found = false;
3312 for (List<PendingBuffersInRequest>::iterator req =
3313 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3314 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3315 for (List<PendingBufferInfo>::iterator i =
3316 req->mPendingBufferList.begin();
3317 i != req->mPendingBufferList.end(); i++) {
3318 if (i->buffer == buffer->buffer) {
3319 mPendingBuffersMap.numPendingBufsAtFlush--;
3320 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3321 buffer->buffer, req->frame_number,
3322 mPendingBuffersMap.numPendingBufsAtFlush);
3323 buffer_found = true;
3324 break;
3325 }
3326 }
3327 if (buffer_found) {
3328 break;
3329 }
3330 }
3331 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3332 //signal the flush()
3333 LOGD("All buffers returned to HAL. Continue flush");
3334 pthread_cond_signal(&mBuffersCond);
3335 }
3336}
3337
Thierry Strudel3d639192016-09-09 11:52:26 -07003338/*===========================================================================
3339 * FUNCTION : handleBatchMetadata
3340 *
3341 * DESCRIPTION: Handles metadata buffer callback in batch mode
3342 *
3343 * PARAMETERS : @metadata_buf: metadata buffer
3344 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3345 * the meta buf in this method
3346 *
3347 * RETURN :
3348 *
3349 *==========================================================================*/
3350void QCamera3HardwareInterface::handleBatchMetadata(
3351 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3352{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003353 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003354
3355 if (NULL == metadata_buf) {
3356 LOGE("metadata_buf is NULL");
3357 return;
3358 }
3359    /* In batch mode, the metadata will contain the frame number and timestamp of
3360 * the last frame in the batch. Eg: a batch containing buffers from request
3361 * 5,6,7 and 8 will have frame number and timestamp corresponding to 8.
3362     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3363 * multiple process_capture_results */
3364 metadata_buffer_t *metadata =
3365 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3366 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3367 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3368 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3369 uint32_t frame_number = 0, urgent_frame_number = 0;
3370 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3371 bool invalid_metadata = false;
3372 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3373 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003374 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003375
3376 int32_t *p_frame_number_valid =
3377 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3378 uint32_t *p_frame_number =
3379 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3380 int64_t *p_capture_time =
3381 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3382 int32_t *p_urgent_frame_number_valid =
3383 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3384 uint32_t *p_urgent_frame_number =
3385 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3386
3387 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3388 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3389 (NULL == p_urgent_frame_number)) {
3390 LOGE("Invalid metadata");
3391 invalid_metadata = true;
3392 } else {
3393 frame_number_valid = *p_frame_number_valid;
3394 last_frame_number = *p_frame_number;
3395 last_frame_capture_time = *p_capture_time;
3396 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3397 last_urgent_frame_number = *p_urgent_frame_number;
3398 }
3399
3400    /* In batch mode, when no video buffers are requested, set_parms are sent
3401 * for every capture_request. The difference between consecutive urgent
3402 * frame numbers and frame numbers should be used to interpolate the
3403 * corresponding frame numbers and time stamps */
3404 pthread_mutex_lock(&mMutex);
3405 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003406 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3407 if(idx < 0) {
3408 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3409 last_urgent_frame_number);
3410 mState = ERROR;
3411 pthread_mutex_unlock(&mMutex);
3412 return;
3413 }
3414 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003415 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3416 first_urgent_frame_number;
3417
3418 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3419 urgent_frame_number_valid,
3420 first_urgent_frame_number, last_urgent_frame_number);
3421 }
3422
3423 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003424 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3425 if(idx < 0) {
3426 LOGE("Invalid frame number received: %d. Irrecoverable error",
3427 last_frame_number);
3428 mState = ERROR;
3429 pthread_mutex_unlock(&mMutex);
3430 return;
3431 }
3432 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003433 frameNumDiff = last_frame_number + 1 -
3434 first_frame_number;
3435 mPendingBatchMap.removeItem(last_frame_number);
3436
3437 LOGD("frm: valid: %d frm_num: %d - %d",
3438 frame_number_valid,
3439 first_frame_number, last_frame_number);
3440
3441 }
3442 pthread_mutex_unlock(&mMutex);
3443
3444 if (urgent_frame_number_valid || frame_number_valid) {
3445 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3446 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3447 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3448 urgentFrameNumDiff, last_urgent_frame_number);
3449 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3450 LOGE("frameNumDiff: %d frameNum: %d",
3451 frameNumDiff, last_frame_number);
3452 }
3453
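    // Example: a batch of 4 with last_frame_number = 8 infers frame numbers 5..8 and
    // spaces their timestamps 1/mHFRVideoFps apart, ending at the reported capture time.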
3454 for (size_t i = 0; i < loopCount; i++) {
3455 /* handleMetadataWithLock is called even for invalid_metadata for
3456 * pipeline depth calculation */
3457 if (!invalid_metadata) {
3458 /* Infer frame number. Batch metadata contains frame number of the
3459 * last frame */
3460 if (urgent_frame_number_valid) {
3461 if (i < urgentFrameNumDiff) {
3462 urgent_frame_number =
3463 first_urgent_frame_number + i;
3464 LOGD("inferred urgent frame_number: %d",
3465 urgent_frame_number);
3466 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3467 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3468 } else {
3469 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3470 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3471 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3472 }
3473 }
3474
3475 /* Infer frame number. Batch metadata contains frame number of the
3476 * last frame */
3477 if (frame_number_valid) {
3478 if (i < frameNumDiff) {
3479 frame_number = first_frame_number + i;
3480 LOGD("inferred frame_number: %d", frame_number);
3481 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3482 CAM_INTF_META_FRAME_NUMBER, frame_number);
3483 } else {
3484 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3485 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3486 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3487 }
3488 }
3489
3490 if (last_frame_capture_time) {
3491 //Infer timestamp
3492 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003493 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003494 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003495 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003496 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3497 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3498 LOGD("batch capture_time: %lld, capture_time: %lld",
3499 last_frame_capture_time, capture_time);
3500 }
3501 }
3502 pthread_mutex_lock(&mMutex);
3503 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003504 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003505 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3506 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003507                &is_metabuf_queued /* if metabuf is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003508 pthread_mutex_unlock(&mMutex);
3509 }
3510
3511 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003512 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003513 mMetadataChannel->bufDone(metadata_buf);
3514 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003515 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003516 }
3517}
3518
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003519void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3520 camera3_error_msg_code_t errorCode)
3521{
3522 camera3_notify_msg_t notify_msg;
3523 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3524 notify_msg.type = CAMERA3_MSG_ERROR;
3525 notify_msg.message.error.error_code = errorCode;
3526 notify_msg.message.error.error_stream = NULL;
3527 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003528 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003529
3530 return;
3531}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003532
3533/*===========================================================================
3534 * FUNCTION : sendPartialMetadataWithLock
3535 *
3536 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3537 *
3538 * PARAMETERS : @metadata: metadata buffer
3539 * @requestIter: The iterator for the pending capture request for
3540 * which the partial result is being sen
3541 *                  which the partial result is being sent
3542 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003543 * @isJumpstartMetadata: Whether this is a partial metadata for
3544 * jumpstart, i.e. even though it doesn't map to a valid partial
3545 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003546 *
3547 * RETURN :
3548 *
3549 *==========================================================================*/
3550
3551void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3552 metadata_buffer_t *metadata,
3553 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003554 bool lastUrgentMetadataInBatch,
3555 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003556{
3557 camera3_capture_result_t result;
3558 memset(&result, 0, sizeof(camera3_capture_result_t));
3559
3560 requestIter->partial_result_cnt++;
3561
3562 // Extract 3A metadata
3563 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003564 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3565 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003566 // Populate metadata result
3567 result.frame_number = requestIter->frame_number;
3568 result.num_output_buffers = 0;
3569 result.output_buffers = NULL;
3570 result.partial_result = requestIter->partial_result_cnt;
3571
3572 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003573 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003574 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3575 // Notify HDR+ client about the partial metadata.
3576 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3577 result.partial_result == PARTIAL_RESULT_COUNT);
3578 }
3579 }
3580
3581 orchestrateResult(&result);
3582 LOGD("urgent frame_number = %u", result.frame_number);
3583 free_camera_metadata((camera_metadata_t *)result.result);
3584}
3585
Thierry Strudel3d639192016-09-09 11:52:26 -07003586/*===========================================================================
3587 * FUNCTION : handleMetadataWithLock
3588 *
3589 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3590 *
3591 * PARAMETERS : @metadata_buf: metadata buffer
3592 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3593 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003594 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3595 * last urgent metadata in a batch. Always true for non-batch mode
3596 * @lastMetadataInBatch: Boolean to indicate whether this is the
3597 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003598 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3599 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003600 *
3601 * RETURN :
3602 *
3603 *==========================================================================*/
3604void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003605 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003606 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3607 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003608{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003609 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003610 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3611 //during flush do not send metadata from this thread
3612 LOGD("not sending metadata during flush or when mState is error");
3613 if (free_and_bufdone_meta_buf) {
3614 mMetadataChannel->bufDone(metadata_buf);
3615 free(metadata_buf);
3616 }
3617 return;
3618 }
3619
3620 //not in flush
3621 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3622 int32_t frame_number_valid, urgent_frame_number_valid;
3623 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003624 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003625 nsecs_t currentSysTime;
3626
3627 int32_t *p_frame_number_valid =
3628 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3629 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3630 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003631 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003632 int32_t *p_urgent_frame_number_valid =
3633 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3634 uint32_t *p_urgent_frame_number =
3635 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3636 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3637 metadata) {
3638 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3639 *p_frame_number_valid, *p_frame_number);
3640 }
3641
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003642 camera_metadata_t *resultMetadata = nullptr;
3643
Thierry Strudel3d639192016-09-09 11:52:26 -07003644 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3645 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3646 LOGE("Invalid metadata");
3647 if (free_and_bufdone_meta_buf) {
3648 mMetadataChannel->bufDone(metadata_buf);
3649 free(metadata_buf);
3650 }
3651 goto done_metadata;
3652 }
3653 frame_number_valid = *p_frame_number_valid;
3654 frame_number = *p_frame_number;
3655 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003656 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003657 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3658 urgent_frame_number = *p_urgent_frame_number;
3659 currentSysTime = systemTime(CLOCK_MONOTONIC);
3660
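    // When sensor timestamps are not calibrated, measure the BOOTTIME-to-MONOTONIC clock
    // offset (best of three samples, keeping the one bracketed by the smallest monotonic
    // read gap) and subtract it so capture_time ends up on the MONOTONIC time base.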
Jason Lee603176d2017-05-31 11:43:27 -07003661 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3662 const int tries = 3;
3663 nsecs_t bestGap, measured;
3664 for (int i = 0; i < tries; ++i) {
3665 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3666 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3667 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3668 const nsecs_t gap = tmono2 - tmono;
3669 if (i == 0 || gap < bestGap) {
3670 bestGap = gap;
3671 measured = tbase - ((tmono + tmono2) >> 1);
3672 }
3673 }
3674 capture_time -= measured;
3675 }
3676
Thierry Strudel3d639192016-09-09 11:52:26 -07003677 // Detect if buffers from any requests are overdue
3678 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003679 int64_t timeout;
3680 {
3681 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3682 // If there is a pending HDR+ request, the following requests may be blocked until the
3683 // HDR+ request is done. So allow a longer timeout.
3684 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3685 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
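            // Never time out earlier than the expected in-flight duration of the queued requests.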
Emilian Peev30522a12017-08-03 14:36:33 +01003686 if (timeout < mExpectedInflightDuration) {
3687 timeout = mExpectedInflightDuration;
3688 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003689 }
3690
3691 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003692 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003693 assert(missed.stream->priv);
3694 if (missed.stream->priv) {
3695 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3696 assert(ch->mStreams[0]);
3697 if (ch->mStreams[0]) {
3698                        LOGE("Cancel missing frame = %d, buffer = %p, "
3699 "stream type = %d, stream format = %d",
3700 req.frame_number, missed.buffer,
3701 ch->mStreams[0]->getMyType(), missed.stream->format);
3702 ch->timeoutFrame(req.frame_number);
3703 }
3704 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003705 }
3706 }
3707 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003708 //For the very first metadata callback, regardless whether it contains valid
3709 //frame number, send the partial metadata for the jumpstarting requests.
3710 //Note that this has to be done even if the metadata doesn't contain valid
3711 //urgent frame number, because in the case only 1 request is ever submitted
3712 //to HAL, there won't be subsequent valid urgent frame number.
3713 if (mFirstMetadataCallback) {
3714 for (pendingRequestIterator i =
3715 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3716 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003717 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3718 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003719 }
3720 }
3721 mFirstMetadataCallback = false;
3722 }
3723
Thierry Strudel3d639192016-09-09 11:52:26 -07003724 //Partial result on process_capture_result for timestamp
3725 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003726 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003727
3728        //Received an urgent frame number, handle it
3729 //using partial results
3730 for (pendingRequestIterator i =
3731 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3732 LOGD("Iterator Frame = %d urgent frame = %d",
3733 i->frame_number, urgent_frame_number);
3734
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003735 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003736 (i->partial_result_cnt == 0)) {
3737 LOGE("Error: HAL missed urgent metadata for frame number %d",
3738 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003739 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003740 }
3741
3742 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003743 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003744 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3745 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003746 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3747 // Instant AEC settled for this frame.
3748 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3749 mInstantAECSettledFrameNumber = urgent_frame_number;
3750 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003751 break;
3752 }
3753 }
3754 }
3755
3756 if (!frame_number_valid) {
3757 LOGD("Not a valid normal frame number, used as SOF only");
3758 if (free_and_bufdone_meta_buf) {
3759 mMetadataChannel->bufDone(metadata_buf);
3760 free(metadata_buf);
3761 }
3762 goto done_metadata;
3763 }
3764 LOGH("valid frame_number = %u, capture_time = %lld",
3765 frame_number, capture_time);
3766
Emilian Peev4e0fe952017-06-30 12:40:09 -07003767 handleDepthDataLocked(metadata->depth_data, frame_number,
3768 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003769
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003770    // Check whether any stream buffer corresponding to this frame is dropped or not.
3771    // If dropped, then send the ERROR_BUFFER for the corresponding stream.
3772    // OR, if instant AEC is enabled, drop frames until AEC is settled.
3773 for (auto & pendingRequest : mPendingRequestsList) {
3774 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3775 mInstantAECSettledFrameNumber)) {
3776 camera3_notify_msg_t notify_msg = {};
3777 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003778 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003779 QCamera3ProcessingChannel *channel =
3780 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003781 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003782 if (p_cam_frame_drop) {
3783 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003784 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003785 // Got the stream ID for drop frame.
3786 dropFrame = true;
3787 break;
3788 }
3789 }
3790 } else {
3791 // This is instant AEC case.
3792                    // For instant AEC, drop the stream until AEC is settled.
3793 dropFrame = true;
3794 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003795
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003796 if (dropFrame) {
3797 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3798 if (p_cam_frame_drop) {
3799 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003800 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003801 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003802 } else {
3803 // For instant AEC, inform frame drop and frame number
3804 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3805 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003806 pendingRequest.frame_number, streamID,
3807 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003808 }
3809 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003810 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003811 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003812 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003813 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003814 if (p_cam_frame_drop) {
3815 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003816 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003817 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003818 } else {
3819 // For instant AEC, inform frame drop and frame number
3820 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3821 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003822 pendingRequest.frame_number, streamID,
3823 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003824 }
3825 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003826 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003827 PendingFrameDrop.stream_ID = streamID;
3828 // Add the Frame drop info to mPendingFrameDropList
3829 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003830 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003831 }
3832 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003833 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003834
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003835 for (auto & pendingRequest : mPendingRequestsList) {
3836 // Find the pending request with the frame number.
3837 if (pendingRequest.frame_number == frame_number) {
3838 // Update the sensor timestamp.
3839 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003840
Thierry Strudel3d639192016-09-09 11:52:26 -07003841
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003842 /* Set the timestamp in display metadata so that clients aware of
 3843 private_handle, such as VT, can use these unmodified timestamps.
 3844 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003845 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003846
Thierry Strudel3d639192016-09-09 11:52:26 -07003847 // Find channel requiring metadata, meaning internal offline postprocess
3848 // is needed.
3849 //TODO: for now, we don't support two streams requiring metadata at the same time.
 3850 // (because we are not making copies, and the metadata buffer is not reference counted).
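            // Note: when a pending buffer needs metadata (internal offline postprocessing,
            // e.g. reprocess for JPEG), the metadata buffer is handed to that channel via
            // queueReprocMetadata() and is not freed here; only when no channel claims it
            // (internalPproc stays false) is it returned via bufDone()/free() below,
            // provided free_and_bufdone_meta_buf is set.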
3851 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003852 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3853 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003854 if (iter->need_metadata) {
3855 internalPproc = true;
3856 QCamera3ProcessingChannel *channel =
3857 (QCamera3ProcessingChannel *)iter->stream->priv;
3858 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003859 if(p_is_metabuf_queued != NULL) {
3860 *p_is_metabuf_queued = true;
3861 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003862 break;
3863 }
3864 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003865 for (auto itr = pendingRequest.internalRequestList.begin();
3866 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003867 if (itr->need_metadata) {
3868 internalPproc = true;
3869 QCamera3ProcessingChannel *channel =
3870 (QCamera3ProcessingChannel *)itr->stream->priv;
3871 channel->queueReprocMetadata(metadata_buf);
3872 break;
3873 }
3874 }
3875
Thierry Strudel54dc9782017-02-15 12:12:10 -08003876 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003877
3878 bool *enableZsl = nullptr;
3879 if (gExposeEnableZslKey) {
3880 enableZsl = &pendingRequest.enableZsl;
3881 }
3882
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003883 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003884 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003885 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003886
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003887 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003888
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003889 if (pendingRequest.blob_request) {
3890 //Dump tuning metadata if enabled and available
3891 char prop[PROPERTY_VALUE_MAX];
3892 memset(prop, 0, sizeof(prop));
3893 property_get("persist.camera.dumpmetadata", prop, "0");
3894 int32_t enabled = atoi(prop);
3895 if (enabled && metadata->is_tuning_params_valid) {
3896 dumpMetadataToFile(metadata->tuning_params,
3897 mMetaFrameCount,
3898 enabled,
3899 "Snapshot",
3900 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003901 }
3902 }
3903
3904 if (!internalPproc) {
3905 LOGD("couldn't find need_metadata for this metadata");
3906 // Return metadata buffer
3907 if (free_and_bufdone_meta_buf) {
3908 mMetadataChannel->bufDone(metadata_buf);
3909 free(metadata_buf);
3910 }
3911 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003912
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003913 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003914 }
3915 }
3916
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003917 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3918
3919 // Try to send out capture result metadata.
3920 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003921 return;
3922
Thierry Strudel3d639192016-09-09 11:52:26 -07003923done_metadata:
3924 for (pendingRequestIterator i = mPendingRequestsList.begin();
3925 i != mPendingRequestsList.end() ;i++) {
3926 i->pipeline_depth++;
3927 }
3928 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3929 unblockRequestIfNecessary();
3930}
3931
3932/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003933 * FUNCTION : handleDepthDataLocked
3934 *
3935 * DESCRIPTION: Handles incoming depth data
3936 *
3937 * PARAMETERS : @depthData : Depth data
3938 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003939 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003940 *
3941 * RETURN :
3942 *
3943 *==========================================================================*/
3944void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003945 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003946 uint32_t currentFrameNumber;
3947 buffer_handle_t *depthBuffer;
3948
3949 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003950 return;
3951 }
3952
3953 camera3_stream_buffer_t resultBuffer =
3954 {.acquire_fence = -1,
3955 .release_fence = -1,
3956 .status = CAMERA3_BUFFER_STATUS_OK,
3957 .buffer = nullptr,
3958 .stream = mDepthChannel->getStream()};
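    // Walk the queued depth buffers in frame-number order up to the incoming frame:
    // the matching buffer gets the depth data populated (or an error status if the data
    // is flagged invalid), while older buffers whose data never arrived are returned
    // with an error notify.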
Emilian Peev7650c122017-01-19 08:24:33 -08003959 do {
3960 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3961 if (nullptr == depthBuffer) {
3962 break;
3963 }
3964
Emilian Peev7650c122017-01-19 08:24:33 -08003965 resultBuffer.buffer = depthBuffer;
3966 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003967 if (valid) {
3968 int32_t rc = mDepthChannel->populateDepthData(depthData,
3969 frameNumber);
3970 if (NO_ERROR != rc) {
3971 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3972 } else {
3973 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3974 }
Emilian Peev7650c122017-01-19 08:24:33 -08003975 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003976 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003977 }
3978 } else if (currentFrameNumber > frameNumber) {
3979 break;
3980 } else {
3981 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3982 {{currentFrameNumber, mDepthChannel->getStream(),
3983 CAMERA3_MSG_ERROR_BUFFER}}};
3984 orchestrateNotify(&notify_msg);
3985
3986 LOGE("Depth buffer for frame number: %d is missing "
3987 "returning back!", currentFrameNumber);
3988 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3989 }
3990 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003991 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003992 } while (currentFrameNumber < frameNumber);
3993}
3994
3995/*===========================================================================
3996 * FUNCTION : notifyErrorFoPendingDepthData
3997 *
3998 * DESCRIPTION: Returns error for any pending depth buffers
3999 *
4000 * PARAMETERS : depthCh - depth channel that needs to get flushed
4001 *
4002 * RETURN :
4003 *
4004 *==========================================================================*/
4005void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4006 QCamera3DepthChannel *depthCh) {
4007 uint32_t currentFrameNumber;
4008 buffer_handle_t *depthBuffer;
4009
4010 if (nullptr == depthCh) {
4011 return;
4012 }
4013
4014 camera3_notify_msg_t notify_msg =
4015 {.type = CAMERA3_MSG_ERROR,
4016 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4017 camera3_stream_buffer_t resultBuffer =
4018 {.acquire_fence = -1,
4019 .release_fence = -1,
4020 .buffer = nullptr,
4021 .stream = depthCh->getStream(),
4022 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004023
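    // Flush path: every depth buffer still mapped in the channel is unmapped, reported
    // to the framework with an ERROR_BUFFER notify, and handed to the output buffer
    // dispatcher with error status.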
4024 while (nullptr !=
4025 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4026 depthCh->unmapBuffer(currentFrameNumber);
4027
4028 notify_msg.message.error.frame_number = currentFrameNumber;
4029 orchestrateNotify(&notify_msg);
4030
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004031 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004032 };
4033}
4034
4035/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004036 * FUNCTION : hdrPlusPerfLock
4037 *
4038 * DESCRIPTION: perf lock for HDR+ using custom intent
4039 *
4040 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4041 *
4042 * RETURN : None
4043 *
4044 *==========================================================================*/
4045void QCamera3HardwareInterface::hdrPlusPerfLock(
4046 mm_camera_super_buf_t *metadata_buf)
4047{
4048 if (NULL == metadata_buf) {
4049 LOGE("metadata_buf is NULL");
4050 return;
4051 }
4052 metadata_buffer_t *metadata =
4053 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4054 int32_t *p_frame_number_valid =
4055 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4056 uint32_t *p_frame_number =
4057 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4058
4059 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4060 LOGE("%s: Invalid metadata", __func__);
4061 return;
4062 }
4063
Wei Wang01385482017-08-03 10:49:34 -07004064 //acquire perf lock for 2 secs after the last HDR frame is captured
4065 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004066 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4067 if ((p_frame_number != NULL) &&
4068 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004069 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004070 }
4071 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004072}
4073
4074/*===========================================================================
4075 * FUNCTION : handleInputBufferWithLock
4076 *
4077 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4078 *
4079 * PARAMETERS : @frame_number: frame number of the input buffer
4080 *
4081 * RETURN :
4082 *
4083 *==========================================================================*/
4084void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4085{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004086 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004087 pendingRequestIterator i = mPendingRequestsList.begin();
4088 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4089 i++;
4090 }
4091 if (i != mPendingRequestsList.end() && i->input_buffer) {
4092 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004093 CameraMetadata settings;
4094 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4095 if(i->settings) {
4096 settings = i->settings;
4097 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4098 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004099 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004100 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004101 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004102 } else {
4103 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004104 }
4105
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004106 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4107 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4108 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004109
4110 camera3_capture_result result;
4111 memset(&result, 0, sizeof(camera3_capture_result));
4112 result.frame_number = frame_number;
4113 result.result = i->settings;
4114 result.input_buffer = i->input_buffer;
4115 result.partial_result = PARTIAL_RESULT_COUNT;
4116
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004117 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004118 LOGD("Input request metadata and input buffer frame_number = %u",
4119 i->frame_number);
4120 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004121
4122 // Dispatch result metadata that may be just unblocked by this reprocess result.
4123 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004124 } else {
4125 LOGE("Could not find input request for frame number %d", frame_number);
4126 }
4127}
4128
4129/*===========================================================================
4130 * FUNCTION : handleBufferWithLock
4131 *
4132 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4133 *
4134 * PARAMETERS : @buffer: image buffer for the callback
4135 * @frame_number: frame number of the image buffer
4136 *
4137 * RETURN :
4138 *
4139 *==========================================================================*/
4140void QCamera3HardwareInterface::handleBufferWithLock(
4141 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4142{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004143 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004144
4145 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4146 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4147 }
4148
Thierry Strudel3d639192016-09-09 11:52:26 -07004149 /* Nothing to be done during error state */
4150 if ((ERROR == mState) || (DEINIT == mState)) {
4151 return;
4152 }
4153 if (mFlushPerf) {
4154 handleBuffersDuringFlushLock(buffer);
4155 return;
4156 }
4157 //not in flush
4158 // If the frame number doesn't exist in the pending request list,
4159 // directly send the buffer to the frameworks, and update pending buffers map
4160 // Otherwise, book-keep the buffer.
4161 pendingRequestIterator i = mPendingRequestsList.begin();
4162 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4163 i++;
4164 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004165
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004166 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004167 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004168 // For a reprocessing request, try to send out result metadata.
4169 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004170 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004171 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004172
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004173 // Check if this frame was dropped.
4174 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4175 m != mPendingFrameDropList.end(); m++) {
4176 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4177 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4178 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4179 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4180 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4181 frame_number, streamID);
4182 m = mPendingFrameDropList.erase(m);
4183 break;
4184 }
4185 }
4186
Binhao Lin09245482017-08-31 18:25:29 -07004187 // WAR for encoder avtimer timestamp issue
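    // For video buffers with the AV timer enabled, the avtimer timestamp recorded for this
    // request is written into the buffer's private handle (SET_VT_TIMESTAMP) so that VT
    // clients can read the unmodified timestamp; if no timestamp was recorded, the buffer
    // is flagged as an error.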
4188 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4189 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4190 m_bAVTimerEnabled) {
4191 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4192 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4193 if (req->frame_number != frame_number)
4194 continue;
4195 if(req->av_timestamp == 0) {
4196 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4197 }
4198 else {
4199 struct private_handle_t *priv_handle =
4200 (struct private_handle_t *) (*(buffer->buffer));
4201 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4202 }
4203 }
4204 }
4205
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004206 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4207 LOGH("result frame_number = %d, buffer = %p",
4208 frame_number, buffer->buffer);
4209
4210 mPendingBuffersMap.removeBuf(buffer->buffer);
4211 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4212
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004213 if (mPreviewStarted == false) {
4214 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4215 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004216 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4217
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004218 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4219 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4220 mPreviewStarted = true;
4221
4222 // Set power hint for preview
4223 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4224 }
4225 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004226}
4227
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004228void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004229 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004230{
4231 // Find the pending request for this result metadata.
4232 auto requestIter = mPendingRequestsList.begin();
4233 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4234 requestIter++;
4235 }
4236
4237 if (requestIter == mPendingRequestsList.end()) {
4238 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4239 return;
4240 }
4241
4242 // Update the result metadata
4243 requestIter->resultMetadata = resultMetadata;
4244
4245 // Check what type of request this is.
4246 bool liveRequest = false;
4247 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004248 // HDR+ request doesn't have partial results.
4249 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004250 } else if (requestIter->input_buffer != nullptr) {
4251 // Reprocessing request result is the same as settings.
4252 requestIter->resultMetadata = requestIter->settings;
4253 // Reprocessing request doesn't have partial results.
4254 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4255 } else {
4256 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004257 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004258 mPendingLiveRequest--;
4259
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004260 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004261 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004262 // For a live request, send the metadata to HDR+ client.
4263 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4264 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4265 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4266 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004267 }
4268 }
4269
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004270 // Remove lens shading map if it's not requested.
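    // Note: acquire() below takes ownership of the passed-in result buffer and release()
    // returns the edited buffer, so requestIter->resultMetadata (not the original
    // resultMetadata pointer) is the valid handle after this block.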
4271 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4272 CameraMetadata metadata;
4273 metadata.acquire(resultMetadata);
4274 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4275 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4276 &requestIter->requestedLensShadingMapMode, 1);
4277
4278 requestIter->resultMetadata = metadata.release();
4279 }
4280
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004281 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4282}
4283
4284void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4285 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004286 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4287 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004288 bool readyToSend = true;
4289
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004290 // Iterate through the pending requests to send out result metadata that are ready. Also if
4291 // this result metadata belongs to a live request, notify errors for previous live requests
4292 // that don't have result metadata yet.
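    // For example, if frames N and N+1 are both pending and N+1's metadata arrives first,
    // N+1 is not sent until N has been resolved (its metadata arrives or it is reported as
    // ERROR_RESULT), keeping shutters and results in frame-number order.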
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004293 auto iter = mPendingRequestsList.begin();
4294 while (iter != mPendingRequestsList.end()) {
4295 // Check if current pending request is ready. If it's not ready, the following pending
4296 // requests are also not ready.
4297 if (readyToSend && iter->resultMetadata == nullptr) {
4298 readyToSend = false;
4299 }
4300
4301 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4302
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004303 camera3_capture_result_t result = {};
4304 result.frame_number = iter->frame_number;
4305 result.result = iter->resultMetadata;
4306 result.partial_result = iter->partial_result_cnt;
4307
4308 // If this pending buffer has result metadata, we may be able to send out shutter callback
4309 // and result metadata.
4310 if (iter->resultMetadata != nullptr) {
4311 if (!readyToSend) {
4312 // If any of the previous pending request is not ready, this pending request is
4313 // also not ready to send in order to keep shutter callbacks and result metadata
4314 // in order.
4315 iter++;
4316 continue;
4317 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004318 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004319 // If the result metadata belongs to a live request, notify errors for previous pending
4320 // live requests.
4321 mPendingLiveRequest--;
4322
4323 CameraMetadata dummyMetadata;
4324 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4325 result.result = dummyMetadata.release();
4326
4327 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004328
4329 // partial_result should be PARTIAL_RESULT_CNT in case of
4330 // ERROR_RESULT.
4331 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4332 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004333 } else {
4334 iter++;
4335 continue;
4336 }
4337
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004338 result.output_buffers = nullptr;
4339 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004340 orchestrateResult(&result);
4341
4342 // For reprocessing, result metadata is the same as settings so do not free it here to
4343 // avoid double free.
4344 if (result.result != iter->settings) {
4345 free_camera_metadata((camera_metadata_t *)result.result);
4346 }
4347 iter->resultMetadata = nullptr;
4348 iter = erasePendingRequest(iter);
4349 }
4350
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004351 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004352 for (auto &iter : mPendingRequestsList) {
4353 // Increment pipeline depth for the following pending requests.
4354 if (iter.frame_number > frameNumber) {
4355 iter.pipeline_depth++;
4356 }
4357 }
4358 }
4359
4360 unblockRequestIfNecessary();
4361}
4362
Thierry Strudel3d639192016-09-09 11:52:26 -07004363/*===========================================================================
4364 * FUNCTION : unblockRequestIfNecessary
4365 *
4366 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4367 * that mMutex is held when this function is called.
4368 *
4369 * PARAMETERS :
4370 *
4371 * RETURN :
4372 *
4373 *==========================================================================*/
4374void QCamera3HardwareInterface::unblockRequestIfNecessary()
4375{
4376 // Unblock process_capture_request
4377 pthread_cond_signal(&mRequestCond);
4378}
4379
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004380/*===========================================================================
4381 * FUNCTION : isHdrSnapshotRequest
4382 *
 4383 * DESCRIPTION: Function to determine if the request is for an HDR snapshot
4384 *
4385 * PARAMETERS : camera3 request structure
4386 *
4387 * RETURN : boolean decision variable
4388 *
4389 *==========================================================================*/
4390bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4391{
4392 if (request == NULL) {
4393 LOGE("Invalid request handle");
4394 assert(0);
4395 return false;
4396 }
4397
4398 if (!mForceHdrSnapshot) {
4399 CameraMetadata frame_settings;
4400 frame_settings = request->settings;
4401
4402 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4403 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4404 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4405 return false;
4406 }
4407 } else {
4408 return false;
4409 }
4410
4411 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4412 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4413 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4414 return false;
4415 }
4416 } else {
4417 return false;
4418 }
4419 }
4420
4421 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4422 if (request->output_buffers[i].stream->format
4423 == HAL_PIXEL_FORMAT_BLOB) {
4424 return true;
4425 }
4426 }
4427
4428 return false;
4429}
4430/*===========================================================================
4431 * FUNCTION : orchestrateRequest
4432 *
4433 * DESCRIPTION: Orchestrates a capture request from camera service
4434 *
4435 * PARAMETERS :
4436 * @request : request from framework to process
4437 *
4438 * RETURN : Error status codes
4439 *
4440 *==========================================================================*/
4441int32_t QCamera3HardwareInterface::orchestrateRequest(
4442 camera3_capture_request_t *request)
4443{
4444
4445 uint32_t originalFrameNumber = request->frame_number;
4446 uint32_t originalOutputCount = request->num_output_buffers;
4447 const camera_metadata_t *original_settings = request->settings;
4448 List<InternalRequest> internallyRequestedStreams;
4449 List<InternalRequest> emptyInternalList;
4450
4451 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4452 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4453 uint32_t internalFrameNumber;
4454 CameraMetadata modified_meta;
4455
4456
4457 /* Add Blob channel to list of internally requested streams */
4458 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4459 if (request->output_buffers[i].stream->format
4460 == HAL_PIXEL_FORMAT_BLOB) {
4461 InternalRequest streamRequested;
4462 streamRequested.meteringOnly = 1;
4463 streamRequested.need_metadata = 0;
4464 streamRequested.stream = request->output_buffers[i].stream;
4465 internallyRequestedStreams.push_back(streamRequested);
4466 }
4467 }
4468 request->num_output_buffers = 0;
4469 auto itr = internallyRequestedStreams.begin();
4470
4471 /* Modify setting to set compensation */
4472 modified_meta = request->settings;
4473 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4474 uint8_t aeLock = 1;
4475 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4476 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4477 camera_metadata_t *modified_settings = modified_meta.release();
4478 request->settings = modified_settings;
4479
4480 /* Capture Settling & -2x frame */
4481 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4482 request->frame_number = internalFrameNumber;
4483 processCaptureRequest(request, internallyRequestedStreams);
4484
4485 request->num_output_buffers = originalOutputCount;
4486 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4487 request->frame_number = internalFrameNumber;
4488 processCaptureRequest(request, emptyInternalList);
4489 request->num_output_buffers = 0;
4490
4491 modified_meta = modified_settings;
4492 expCompensation = 0;
4493 aeLock = 1;
4494 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4495 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4496 modified_settings = modified_meta.release();
4497 request->settings = modified_settings;
4498
4499 /* Capture Settling & 0X frame */
4500
4501 itr = internallyRequestedStreams.begin();
4502 if (itr == internallyRequestedStreams.end()) {
4503 LOGE("Error Internally Requested Stream list is empty");
4504 assert(0);
4505 } else {
4506 itr->need_metadata = 0;
4507 itr->meteringOnly = 1;
4508 }
4509
4510 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4511 request->frame_number = internalFrameNumber;
4512 processCaptureRequest(request, internallyRequestedStreams);
4513
4514 itr = internallyRequestedStreams.begin();
4515 if (itr == internallyRequestedStreams.end()) {
4516 ALOGE("Error Internally Requested Stream list is empty");
4517 assert(0);
4518 } else {
4519 itr->need_metadata = 1;
4520 itr->meteringOnly = 0;
4521 }
4522
4523 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4524 request->frame_number = internalFrameNumber;
4525 processCaptureRequest(request, internallyRequestedStreams);
4526
4527 /* Capture 2X frame*/
4528 modified_meta = modified_settings;
4529 expCompensation = GB_HDR_2X_STEP_EV;
4530 aeLock = 1;
4531 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4532 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4533 modified_settings = modified_meta.release();
4534 request->settings = modified_settings;
4535
4536 itr = internallyRequestedStreams.begin();
4537 if (itr == internallyRequestedStreams.end()) {
4538 ALOGE("Error Internally Requested Stream list is empty");
4539 assert(0);
4540 } else {
4541 itr->need_metadata = 0;
4542 itr->meteringOnly = 1;
4543 }
4544 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4545 request->frame_number = internalFrameNumber;
4546 processCaptureRequest(request, internallyRequestedStreams);
4547
4548 itr = internallyRequestedStreams.begin();
4549 if (itr == internallyRequestedStreams.end()) {
4550 ALOGE("Error Internally Requested Stream list is empty");
4551 assert(0);
4552 } else {
4553 itr->need_metadata = 1;
4554 itr->meteringOnly = 0;
4555 }
4556
4557 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4558 request->frame_number = internalFrameNumber;
4559 processCaptureRequest(request, internallyRequestedStreams);
4560
4561
4562 /* Capture 2X on original streaming config*/
4563 internallyRequestedStreams.clear();
4564
4565 /* Restore original settings pointer */
4566 request->settings = original_settings;
4567 } else {
4568 uint32_t internalFrameNumber;
4569 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4570 request->frame_number = internalFrameNumber;
4571 return processCaptureRequest(request, internallyRequestedStreams);
4572 }
4573
4574 return NO_ERROR;
4575}
4576
4577/*===========================================================================
4578 * FUNCTION : orchestrateResult
4579 *
4580 * DESCRIPTION: Orchestrates a capture result to camera service
4581 *
4582 * PARAMETERS :
 4583 * @result : capture result to be sent to the framework
4584 *
4585 * RETURN :
4586 *
4587 *==========================================================================*/
4588void QCamera3HardwareInterface::orchestrateResult(
4589 camera3_capture_result_t *result)
4590{
4591 uint32_t frameworkFrameNumber;
4592 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4593 frameworkFrameNumber);
4594 if (rc != NO_ERROR) {
4595 LOGE("Cannot find translated frameworkFrameNumber");
4596 assert(0);
4597 } else {
4598 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004599 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004600 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004601 if (result->result != NULL) {
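                // Results carry the HAL-internal frame number, so rewrite
                // ANDROID_SYNC_FRAME_NUMBER to the translated framework frame number
                // before forwarding the result.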
Binhao Lin299ffc92017-04-27 11:22:47 -07004602 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4603 camera_metadata_entry_t entry;
4604 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4605 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004606 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004607 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4608 if (ret != OK)
4609 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004610 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004611 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004612 result->frame_number = frameworkFrameNumber;
4613 mCallbackOps->process_capture_result(mCallbackOps, result);
4614 }
4615 }
4616}
4617
4618/*===========================================================================
4619 * FUNCTION : orchestrateNotify
4620 *
4621 * DESCRIPTION: Orchestrates a notify to camera service
4622 *
4623 * PARAMETERS :
 4624 * @notify_msg : notify message to be sent to the framework
4625 *
4626 * RETURN :
4627 *
4628 *==========================================================================*/
4629void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4630{
4631 uint32_t frameworkFrameNumber;
4632 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004633 int32_t rc = NO_ERROR;
4634
4635 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004636 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004637
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004638 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004639 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4640 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4641 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004642 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004643 LOGE("Cannot find translated frameworkFrameNumber");
4644 assert(0);
4645 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004646 }
4647 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004648
4649 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4650 LOGD("Internal Request drop the notifyCb");
4651 } else {
4652 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4653 mCallbackOps->notify(mCallbackOps, notify_msg);
4654 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004655}
4656
4657/*===========================================================================
4658 * FUNCTION : FrameNumberRegistry
4659 *
4660 * DESCRIPTION: Constructor
4661 *
4662 * PARAMETERS :
4663 *
4664 * RETURN :
4665 *
4666 *==========================================================================*/
4667FrameNumberRegistry::FrameNumberRegistry()
4668{
4669 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4670}
4671
4672/*===========================================================================
4673 * FUNCTION : ~FrameNumberRegistry
4674 *
4675 * DESCRIPTION: Destructor
4676 *
4677 * PARAMETERS :
4678 *
4679 * RETURN :
4680 *
4681 *==========================================================================*/
4682FrameNumberRegistry::~FrameNumberRegistry()
4683{
4684}
4685
4686/*===========================================================================
4687 * FUNCTION : PurgeOldEntriesLocked
4688 *
 4689 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4690 *
4691 * PARAMETERS :
4692 *
4693 * RETURN : NONE
4694 *
4695 *==========================================================================*/
4696void FrameNumberRegistry::purgeOldEntriesLocked()
4697{
4698 while (_register.begin() != _register.end()) {
4699 auto itr = _register.begin();
4700 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4701 _register.erase(itr);
4702 } else {
4703 return;
4704 }
4705 }
4706}
4707
4708/*===========================================================================
4709 * FUNCTION : allocStoreInternalFrameNumber
4710 *
4711 * DESCRIPTION: Method to note down a framework request and associate a new
4712 * internal request number against it
4713 *
4714 * PARAMETERS :
4715 * @fFrameNumber: Identifier given by framework
4716 * @internalFN : Output parameter which will have the newly generated internal
4717 * entry
4718 *
4719 * RETURN : Error code
4720 *
4721 *==========================================================================*/
4722int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4723 uint32_t &internalFrameNumber)
4724{
4725 Mutex::Autolock lock(mRegistryLock);
4726 internalFrameNumber = _nextFreeInternalNumber++;
4727 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4728 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4729 purgeOldEntriesLocked();
4730 return NO_ERROR;
4731}
4732
4733/*===========================================================================
4734 * FUNCTION : generateStoreInternalFrameNumber
4735 *
4736 * DESCRIPTION: Method to associate a new internal request number independent
 4737 * of any association with framework requests
4738 *
4739 * PARAMETERS :
 4740 * @internalFrame#: Output parameter which will have the newly generated internal frame number
4741 *
4742 *
4743 * RETURN : Error code
4744 *
4745 *==========================================================================*/
4746int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4747{
4748 Mutex::Autolock lock(mRegistryLock);
4749 internalFrameNumber = _nextFreeInternalNumber++;
4750 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4751 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4752 purgeOldEntriesLocked();
4753 return NO_ERROR;
4754}
4755
4756/*===========================================================================
4757 * FUNCTION : getFrameworkFrameNumber
4758 *
4759 * DESCRIPTION: Method to query the framework framenumber given an internal #
4760 *
4761 * PARAMETERS :
4762 * @internalFrame#: Internal reference
4763 * @frameworkframenumber: Output parameter holding framework frame entry
4764 *
4765 * RETURN : Error code
4766 *
4767 *==========================================================================*/
4768int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4769 uint32_t &frameworkFrameNumber)
4770{
4771 Mutex::Autolock lock(mRegistryLock);
4772 auto itr = _register.find(internalFrameNumber);
4773 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004774 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004775 return -ENOENT;
4776 }
4777
4778 frameworkFrameNumber = itr->second;
4779 purgeOldEntriesLocked();
4780 return NO_ERROR;
4781}
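/*
 * Typical FrameNumberRegistry usage, sketched from the calls elsewhere in this file:
 *
 *   uint32_t internalFN;
 *   _orchestrationDb.allocStoreInternalFrameNumber(frameworkFN, internalFN); // framework request
 *   _orchestrationDb.generateStoreInternalFrameNumber(internalFN);           // HAL-internal request
 *   ...
 *   _orchestrationDb.getFrameworkFrameNumber(internalFN, frameworkFN);       // result/notify path
 *
 * Lookups that resolve to EMPTY_FRAMEWORK_FRAME_NUMBER belong to internal requests and are
 * dropped in orchestrateResult()/orchestrateNotify() before reaching the framework.
 */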
Thierry Strudel3d639192016-09-09 11:52:26 -07004782
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004783status_t QCamera3HardwareInterface::fillPbStreamConfig(
4784 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, int pbStreamFormat,
4785 QCamera3Channel *channel, uint32_t streamIndex) {
4786 if (config == nullptr) {
4787 LOGE("%s: config is null", __FUNCTION__);
4788 return BAD_VALUE;
4789 }
4790
4791 if (channel == nullptr) {
4792 LOGE("%s: channel is null", __FUNCTION__);
4793 return BAD_VALUE;
4794 }
4795
4796 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4797 if (stream == nullptr) {
4798 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4799 return NAME_NOT_FOUND;
4800 }
4801
4802 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4803 if (streamInfo == nullptr) {
4804 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4805 return NAME_NOT_FOUND;
4806 }
4807
4808 config->id = pbStreamId;
4809 config->image.width = streamInfo->dim.width;
4810 config->image.height = streamInfo->dim.height;
4811 config->image.padding = 0;
4812 config->image.format = pbStreamFormat;
4813
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004814 uint32_t totalPlaneSize = 0;
4815
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004816 // Fill plane information.
4817 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4818 pbcamera::PlaneConfiguration plane;
4819 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride_in_bytes;
4820 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4821 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004822
4823 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004824 }
4825
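    // Padding is whatever the backend's reported frame length reserves beyond the plane
    // data accounted for above (stride * scanline summed over all planes).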
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004826 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004827 return OK;
4828}
4829
Thierry Strudel3d639192016-09-09 11:52:26 -07004830/*===========================================================================
4831 * FUNCTION : processCaptureRequest
4832 *
4833 * DESCRIPTION: process a capture request from camera service
4834 *
4835 * PARAMETERS :
4836 * @request : request from framework to process
4837 *
4838 * RETURN :
4839 *
4840 *==========================================================================*/
4841int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004842 camera3_capture_request_t *request,
4843 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004844{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004845 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004846 int rc = NO_ERROR;
4847 int32_t request_id;
4848 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004849 bool isVidBufRequested = false;
4850 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004851 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004852
4853 pthread_mutex_lock(&mMutex);
4854
4855 // Validate current state
4856 switch (mState) {
4857 case CONFIGURED:
4858 case STARTED:
4859 /* valid state */
4860 break;
4861
4862 case ERROR:
4863 pthread_mutex_unlock(&mMutex);
4864 handleCameraDeviceError();
4865 return -ENODEV;
4866
4867 default:
4868 LOGE("Invalid state %d", mState);
4869 pthread_mutex_unlock(&mMutex);
4870 return -ENODEV;
4871 }
4872
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004873 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004874 if (rc != NO_ERROR) {
4875 LOGE("incoming request is not valid");
4876 pthread_mutex_unlock(&mMutex);
4877 return rc;
4878 }
4879
4880 meta = request->settings;
4881
4882 // For first capture request, send capture intent, and
4883 // stream on all streams
4884 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004885 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004886 // send an unconfigure to the backend so that the isp
4887 // resources are deallocated
4888 if (!mFirstConfiguration) {
4889 cam_stream_size_info_t stream_config_info;
4890 int32_t hal_version = CAM_HAL_V3;
4891 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4892 stream_config_info.buffer_info.min_buffers =
4893 MIN_INFLIGHT_REQUESTS;
4894 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004895 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004896 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004897 clear_metadata_buffer(mParameters);
4898 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4899 CAM_INTF_PARM_HAL_VERSION, hal_version);
4900 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4901 CAM_INTF_META_STREAM_INFO, stream_config_info);
4902 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4903 mParameters);
4904 if (rc < 0) {
4905 LOGE("set_parms for unconfigure failed");
4906 pthread_mutex_unlock(&mMutex);
4907 return rc;
4908 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004909
Thierry Strudel3d639192016-09-09 11:52:26 -07004910 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004911 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004912 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004913 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004915 property_get("persist.camera.is_type", is_type_value, "4");
4916 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4917 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4918 property_get("persist.camera.is_type_preview", is_type_value, "4");
4919 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4920 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004921
4922 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4923 int32_t hal_version = CAM_HAL_V3;
4924 uint8_t captureIntent =
4925 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4926 mCaptureIntent = captureIntent;
4927 clear_metadata_buffer(mParameters);
4928 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4929 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4930 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004931 if (mFirstConfiguration) {
4932 // configure instant AEC
4933 // Instant AEC is a session based parameter and it is needed only
4934 // once per complete session after open camera.
4935 // i.e. This is set only once for the first capture request, after open camera.
4936 setInstantAEC(meta);
4937 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004938 uint8_t fwkVideoStabMode=0;
4939 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4940 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4941 }
4942
Xue Tuecac74e2017-04-17 13:58:15 -07004943 // If the EIS setprop is enabled, turn EIS on only for video/preview (and not when the AV timer is in use)
4944 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004945 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004946 int32_t vsMode;
4947 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4948 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4949 rc = BAD_VALUE;
4950 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004951 LOGD("setEis %d", setEis);
4952 bool eis3Supported = false;
4953 size_t count = IS_TYPE_MAX;
4954 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4955 for (size_t i = 0; i < count; i++) {
4956 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4957 eis3Supported = true;
4958 break;
4959 }
4960 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004961
4962 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004963 //it could either be 4 (IS_TYPE_EIS_2_0) or 5 (IS_TYPE_EIS_3_0) depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004964 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4965 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004966 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4967 is_type = isTypePreview;
4968 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4969 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4970 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004971 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004972 } else {
4973 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004974 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004975 } else {
4976 is_type = IS_TYPE_NONE;
4977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004978 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004979 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004980 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4981 }
4982 }
4983
4984 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4985 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
4986
Thierry Strudel54dc9782017-02-15 12:12:10 -08004987 //Disable tintless only if the property is set to 0
4988 memset(prop, 0, sizeof(prop));
4989 property_get("persist.camera.tintless.enable", prop, "1");
4990 int32_t tintless_value = atoi(prop);
4991
Thierry Strudel3d639192016-09-09 11:52:26 -07004992 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4993 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08004994
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 //Disable CDS for HFR mode or if DIS/EIS is on.
4996 //CDS is a session parameter in the backend/ISP, so need to be set/reset
4997 //after every configure_stream
4998 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
4999 (m_bIsVideo)) {
5000 int32_t cds = CAM_CDS_MODE_OFF;
5001 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5002 CAM_INTF_PARM_CDS_MODE, cds))
5003 LOGE("Failed to disable CDS for HFR mode");
5004
5005 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005006
5007 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5008 uint8_t* use_av_timer = NULL;
5009
5010 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005011 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005012 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005013 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005014 }
5015 else{
5016 use_av_timer =
5017 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005018 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005019 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005020 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5021 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005022 }
5023
5024 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5025 rc = BAD_VALUE;
5026 }
5027 }
5028
Thierry Strudel3d639192016-09-09 11:52:26 -07005029 setMobicat();
5030
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005031 uint8_t nrMode = 0;
5032 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5033 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5034 }
5035
Thierry Strudel3d639192016-09-09 11:52:26 -07005036 /* Set fps and hfr mode while sending meta stream info so that sensor
5037 * can configure appropriate streaming mode */
5038 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005039 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5040 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005041 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5042 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005043 if (rc == NO_ERROR) {
5044 int32_t max_fps =
5045 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005046 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005047 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5048 }
5049 /* For HFR, more buffers are dequeued upfront to improve the performance */
5050 if (mBatchSize) {
5051 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5052 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5053 }
5054 }
5055 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005056 LOGE("setHalFpsRange failed");
5057 }
5058 }
5059 if (meta.exists(ANDROID_CONTROL_MODE)) {
5060 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5061 rc = extractSceneMode(meta, metaMode, mParameters);
5062 if (rc != NO_ERROR) {
5063 LOGE("extractSceneMode failed");
5064 }
5065 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005066 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005067
Thierry Strudel04e026f2016-10-10 11:27:36 -07005068 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5069 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5070 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5071 rc = setVideoHdrMode(mParameters, vhdr);
5072 if (rc != NO_ERROR) {
5073 LOGE("setVideoHDR is failed");
5074 }
5075 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005076
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005077 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005078 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005079 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005080 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5081 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5082 sensorModeFullFov)) {
5083 rc = BAD_VALUE;
5084 }
5085 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005086 //TODO: validate the arguments, HSV scenemode should have only the
5087 //advertised fps ranges
5088
5089 /*set the capture intent, hal version, tintless, stream info,
 5090 *and DIS enable parameters to the backend*/
5091 LOGD("set_parms META_STREAM_INFO " );
5092 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005093 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5094 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005095 mStreamConfigInfo.type[i],
5096 mStreamConfigInfo.stream_sizes[i].width,
5097 mStreamConfigInfo.stream_sizes[i].height,
5098 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005099 mStreamConfigInfo.format[i],
5100 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005101 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005102
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5104 mParameters);
5105 if (rc < 0) {
5106 LOGE("set_parms failed for hal version, stream info");
5107 }
5108
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005109 cam_sensor_mode_info_t sensorModeInfo = {};
5110 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005111 if (rc != NO_ERROR) {
5112 LOGE("Failed to get sensor output size");
5113 pthread_mutex_unlock(&mMutex);
5114 goto error_exit;
5115 }
5116
5117 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5118 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005119 sensorModeInfo.active_array_size.width,
5120 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005121
5122    /* Set batch mode before initializing channels. Since registerBuffer
5123     * internally initializes some of the channels, it is better to set batch mode
5124     * even before the first registerBuffer call */
5125 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5126 it != mStreamInfo.end(); it++) {
5127 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5128 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5129 && mBatchSize) {
5130 rc = channel->setBatchSize(mBatchSize);
5131 //Disable per frame map unmap for HFR/batchmode case
5132 rc |= channel->setPerFrameMapUnmap(false);
5133 if (NO_ERROR != rc) {
5134 LOGE("Channel init failed %d", rc);
5135 pthread_mutex_unlock(&mMutex);
5136 goto error_exit;
5137 }
5138 }
5139 }
5140
5141 //First initialize all streams
5142 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5143 it != mStreamInfo.end(); it++) {
5144 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005145
5146 /* Initial value of NR mode is needed before stream on */
5147 channel->setNRMode(nrMode);
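        // For preview/video streams with EIS enabled, look up the IS type chosen during
        // stream configuration and initialize the channel with it; every other channel
        // is initialized with IS_TYPE_NONE.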
Thierry Strudel3d639192016-09-09 11:52:26 -07005148 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5149 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005150 setEis) {
5151 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5152 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5153 is_type = mStreamConfigInfo.is_type[i];
5154 break;
5155 }
5156 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005157 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005158 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005159 rc = channel->initialize(IS_TYPE_NONE);
5160 }
5161 if (NO_ERROR != rc) {
5162 LOGE("Channel initialization failed %d", rc);
5163 pthread_mutex_unlock(&mMutex);
5164 goto error_exit;
5165 }
5166 }
5167
5168 if (mRawDumpChannel) {
5169 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5170 if (rc != NO_ERROR) {
5171 LOGE("Error: Raw Dump Channel init failed");
5172 pthread_mutex_unlock(&mMutex);
5173 goto error_exit;
5174 }
5175 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005176 if (mHdrPlusRawSrcChannel) {
5177 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5178 if (rc != NO_ERROR) {
5179 LOGE("Error: HDR+ RAW Source Channel init failed");
5180 pthread_mutex_unlock(&mMutex);
5181 goto error_exit;
5182 }
5183 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005184 if (mSupportChannel) {
5185 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5186 if (rc < 0) {
5187 LOGE("Support channel initialization failed");
5188 pthread_mutex_unlock(&mMutex);
5189 goto error_exit;
5190 }
5191 }
5192 if (mAnalysisChannel) {
5193 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5194 if (rc < 0) {
5195 LOGE("Analysis channel initialization failed");
5196 pthread_mutex_unlock(&mMutex);
5197 goto error_exit;
5198 }
5199 }
5200 if (mDummyBatchChannel) {
5201 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5202 if (rc < 0) {
5203 LOGE("mDummyBatchChannel setBatchSize failed");
5204 pthread_mutex_unlock(&mMutex);
5205 goto error_exit;
5206 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005207 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005208 if (rc < 0) {
5209 LOGE("mDummyBatchChannel initialization failed");
5210 pthread_mutex_unlock(&mMutex);
5211 goto error_exit;
5212 }
5213 }
5214
5215 // Set bundle info
5216 rc = setBundleInfo();
5217 if (rc < 0) {
5218 LOGE("setBundleInfo failed %d", rc);
5219 pthread_mutex_unlock(&mMutex);
5220 goto error_exit;
5221 }
5222
5223 //update settings from app here
5224 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5225 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5226 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5227 }
5228 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5229 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5230 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5231 }
5232 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5233 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5234 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5235
5236 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5237 (mLinkedCameraId != mCameraId) ) {
5238 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5239 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005240 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005241 goto error_exit;
5242 }
5243 }
5244
5245 // add bundle related cameras
5246 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5247 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005248 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5249 &m_pDualCamCmdPtr->bundle_info;
5250 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005251 if (mIsDeviceLinked)
5252 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5253 else
5254 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5255
5256 pthread_mutex_lock(&gCamLock);
5257
5258 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5259 LOGE("Dualcam: Invalid Session Id ");
5260 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005261 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005262 goto error_exit;
5263 }
5264
5265 if (mIsMainCamera == 1) {
5266 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5267 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005268 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005269 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005270 // related session id should be session id of linked session
5271 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5272 } else {
5273 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5274 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005275 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005276 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005277 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5278 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005279 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005280 pthread_mutex_unlock(&gCamLock);
5281
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005282 rc = mCameraHandle->ops->set_dual_cam_cmd(
5283 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005284 if (rc < 0) {
5285 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005286 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 goto error_exit;
5288 }
5289 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005290 goto no_error;
5291error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005292 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005293 return rc;
5294no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005295 mWokenUpByDaemon = false;
5296 mPendingLiveRequest = 0;
5297 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005298 }
5299
5300 uint32_t frameNumber = request->frame_number;
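    // streamsArray collects the stream IDs (and buffer indices) requested in this frame;
    // it is passed to the backend later via CAM_INTF_META_STREAM_ID.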
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005301 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005302
5303 if (mFlushPerf) {
5304 //we cannot accept any requests during flush
5305 LOGE("process_capture_request cannot proceed during flush");
5306 pthread_mutex_unlock(&mMutex);
5307 return NO_ERROR; //should return an error
5308 }
5309
5310 if (meta.exists(ANDROID_REQUEST_ID)) {
5311 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5312 mCurrentRequestId = request_id;
5313 LOGD("Received request with id: %d", request_id);
5314 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
5315        LOGE("Unable to find request id field, "
5316                "and no previous id available");
5317 pthread_mutex_unlock(&mMutex);
5318 return NAME_NOT_FOUND;
5319 } else {
5320 LOGD("Re-using old request id");
5321 request_id = mCurrentRequestId;
5322 }
5323
5324 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5325 request->num_output_buffers,
5326 request->input_buffer,
5327 frameNumber);
5328 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005329 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005330 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005331 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005332 uint32_t snapshotStreamId = 0;
5333 for (size_t i = 0; i < request->num_output_buffers; i++) {
5334 const camera3_stream_buffer_t& output = request->output_buffers[i];
5335 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5336
Emilian Peev7650c122017-01-19 08:24:33 -08005337 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5338 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005339 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005340 blob_request = 1;
5341 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5342 }
5343
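        // Wait on (and close) the buffer's acquire fence so the buffer is safe to hand
        // to the backend for filling.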
5344 if (output.acquire_fence != -1) {
5345 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5346 close(output.acquire_fence);
5347 if (rc != OK) {
5348 LOGE("sync wait failed %d", rc);
5349 pthread_mutex_unlock(&mMutex);
5350 return rc;
5351 }
5352 }
5353
Emilian Peev0f3c3162017-03-15 12:57:46 +00005354 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5355 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005356 depthRequestPresent = true;
5357 continue;
5358 }
5359
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005360 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005361 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005362
5363 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5364 isVidBufRequested = true;
5365 }
5366 }
5367
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005368    //FIXME: Add checks to ensure there are no dups in validateCaptureRequest
5369 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5370 itr++) {
5371 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5372 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5373 channel->getStreamID(channel->getStreamTypeMask());
5374
5375 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5376 isVidBufRequested = true;
5377 }
5378 }
5379
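    // A JPEG (blob) request marks the start of a snapshot: begin the snapshot trace and
    // grab the snapshot perf lock.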
Thierry Strudel3d639192016-09-09 11:52:26 -07005380 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005381 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005382 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005383 }
5384 if (blob_request && mRawDumpChannel) {
5385 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005386 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005387 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005388 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005389 }
5390
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005391 {
5392 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5393 // Request a RAW buffer if
5394 // 1. mHdrPlusRawSrcChannel is valid.
5395 // 2. frameNumber is multiples of kHdrPlusRawPeriod (in order to limit RAW capture rate.)
5396 // 3. There is no pending HDR+ request.
5397 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5398 mHdrPlusPendingRequests.size() == 0) {
5399 streamsArray.stream_request[streamsArray.num_streams].streamID =
5400 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5401 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5402 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005403 }
5404
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005405 //extract capture intent
5406 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5407 mCaptureIntent =
5408 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5409 }
5410
5411 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5412 mCacMode =
5413 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5414 }
5415
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005416 uint8_t requestedLensShadingMapMode;
5417 // Get the shading map mode.
5418 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5419 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5420 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5421 } else {
5422 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5423 }
5424
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005425 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005426 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005427
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005428 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005429 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005430 // If this request has a still capture intent, try to submit an HDR+ request.
5431 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5432 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5433 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5434 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005435 }
5436
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005437 if (hdrPlusRequest) {
5438 // For a HDR+ request, just set the frame parameters.
5439 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5440 if (rc < 0) {
5441 LOGE("fail to set frame parameters");
5442 pthread_mutex_unlock(&mMutex);
5443 return rc;
5444 }
5445 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005446 /* Parse the settings:
5447 * - For every request in NORMAL MODE
5448 * - For every request in HFR mode during preview only case
5449 * - For first request of every batch in HFR mode during video
5450 * recording. In batchmode the same settings except frame number is
5451 * repeated in each request of the batch.
5452 */
5453 if (!mBatchSize ||
5454 (mBatchSize && !isVidBufRequested) ||
5455 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005456 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005457 if (rc < 0) {
5458 LOGE("fail to set frame parameters");
5459 pthread_mutex_unlock(&mMutex);
5460 return rc;
5461 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005462
5463 {
5464 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5465 // will be reported in result metadata.
5466 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5467 if (mHdrPlusModeEnabled) {
5468 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5469 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5470 }
5471 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005472 }
5473        /* For batchMode HFR, setFrameParameters is not called for every
5474         * request; only the frame number of the latest request is parsed.
5475         * Keep track of the first and last frame numbers in a batch so that
5476         * metadata for the frame numbers of the batch can be duplicated in
5477         * handleBatchMetadata */
5478 if (mBatchSize) {
5479 if (!mToBeQueuedVidBufs) {
5480 //start of the batch
5481 mFirstFrameNumberInBatch = request->frame_number;
5482 }
5483 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5484 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5485 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005486 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005487 return BAD_VALUE;
5488 }
5489 }
5490 if (mNeedSensorRestart) {
5491 /* Unlock the mutex as restartSensor waits on the channels to be
5492 * stopped, which in turn calls stream callback functions -
5493 * handleBufferWithLock and handleMetadataWithLock */
5494 pthread_mutex_unlock(&mMutex);
5495 rc = dynamicUpdateMetaStreamInfo();
5496 if (rc != NO_ERROR) {
5497 LOGE("Restarting the sensor failed");
5498 return BAD_VALUE;
5499 }
5500 mNeedSensorRestart = false;
5501 pthread_mutex_lock(&mMutex);
5502 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005503 if(mResetInstantAEC) {
5504 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5505 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5506 mResetInstantAEC = false;
5507 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005508 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005509 if (request->input_buffer->acquire_fence != -1) {
5510 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5511 close(request->input_buffer->acquire_fence);
5512 if (rc != OK) {
5513 LOGE("input buffer sync wait failed %d", rc);
5514 pthread_mutex_unlock(&mMutex);
5515 return rc;
5516 }
5517 }
5518 }
5519
5520 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5521 mLastCustIntentFrmNum = frameNumber;
5522 }
5523 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005524 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005525 pendingRequestIterator latestRequest;
5526 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005527 pendingRequest.num_buffers = depthRequestPresent ?
5528 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005529 pendingRequest.request_id = request_id;
5530 pendingRequest.blob_request = blob_request;
5531 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005532 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005533 if (request->input_buffer) {
5534 pendingRequest.input_buffer =
5535 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5536 *(pendingRequest.input_buffer) = *(request->input_buffer);
5537 pInputBuffer = pendingRequest.input_buffer;
5538 } else {
5539 pendingRequest.input_buffer = NULL;
5540 pInputBuffer = NULL;
5541 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005542 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005543
5544 pendingRequest.pipeline_depth = 0;
5545 pendingRequest.partial_result_cnt = 0;
5546 extractJpegMetadata(mCurJpegMeta, request);
5547 pendingRequest.jpegMetadata = mCurJpegMeta;
5548 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005549 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005550 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005551 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005552 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5553 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005554
Samuel Ha68ba5172016-12-15 18:41:12 -08005555 /* DevCamDebug metadata processCaptureRequest */
5556 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5557 mDevCamDebugMetaEnable =
5558 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5559 }
5560 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5561 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005562
5563 //extract CAC info
5564 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5565 mCacMode =
5566 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5567 }
5568 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005569 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005570 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5571 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005572
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005573 // extract enableZsl info
5574 if (gExposeEnableZslKey) {
5575 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5576 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5577 mZslEnabled = pendingRequest.enableZsl;
5578 } else {
5579 pendingRequest.enableZsl = mZslEnabled;
5580 }
5581 }
5582
Thierry Strudel3d639192016-09-09 11:52:26 -07005583 PendingBuffersInRequest bufsForCurRequest;
5584 bufsForCurRequest.frame_number = frameNumber;
5585 // Mark current timestamp for the new request
5586 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005587 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005588 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005589
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005590 if (hdrPlusRequest) {
5591 // Save settings for this request.
5592 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5593 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5594
5595 // Add to pending HDR+ request queue.
5596 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5597 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5598
5599 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5600 }
5601
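    // Record every requested output buffer so it can be matched with results later.
    // Depth (blob) buffers are skipped here; they are handled by the depth channel.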
Thierry Strudel3d639192016-09-09 11:52:26 -07005602 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005603 if ((request->output_buffers[i].stream->data_space ==
5604 HAL_DATASPACE_DEPTH) &&
5605 (HAL_PIXEL_FORMAT_BLOB ==
5606 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005607 continue;
5608 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005609 RequestedBufferInfo requestedBuf;
5610 memset(&requestedBuf, 0, sizeof(requestedBuf));
5611 requestedBuf.stream = request->output_buffers[i].stream;
5612 requestedBuf.buffer = NULL;
5613 pendingRequest.buffers.push_back(requestedBuf);
5614
5615            // Add the buffer handle to the pending buffers list
5616 PendingBufferInfo bufferInfo;
5617 bufferInfo.buffer = request->output_buffers[i].buffer;
5618 bufferInfo.stream = request->output_buffers[i].stream;
5619 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5620 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5621 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5622 frameNumber, bufferInfo.buffer,
5623 channel->getStreamTypeMask(), bufferInfo.stream->format);
5624 }
5625 // Add this request packet into mPendingBuffersMap
5626 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5627 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5628 mPendingBuffersMap.get_num_overall_buffers());
5629
5630 latestRequest = mPendingRequestsList.insert(
5631 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005632
5633 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5634 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005635 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005636 for (size_t i = 0; i < request->num_output_buffers; i++) {
5637 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5638 }
5639
Thierry Strudel3d639192016-09-09 11:52:26 -07005640 if(mFlush) {
5641 LOGI("mFlush is true");
5642 pthread_mutex_unlock(&mMutex);
5643 return NO_ERROR;
5644 }
5645
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005646 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5647 // channel.
5648 if (!hdrPlusRequest) {
5649 int indexUsed;
5650 // Notify metadata channel we receive a request
5651 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005652
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005653 if(request->input_buffer != NULL){
5654 LOGD("Input request, frame_number %d", frameNumber);
5655 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5656 if (NO_ERROR != rc) {
5657 LOGE("fail to set reproc parameters");
5658 pthread_mutex_unlock(&mMutex);
5659 return rc;
5660 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005661 }
5662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005663 // Call request on other streams
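        // streams_need_metadata counts output streams (JPEG and reprocess-style YUV)
        // that need the HAL metadata buffer for post-processing; at most one such
        // stream per request is supported (checked further below).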
5664 uint32_t streams_need_metadata = 0;
5665 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5666 for (size_t i = 0; i < request->num_output_buffers; i++) {
5667 const camera3_stream_buffer_t& output = request->output_buffers[i];
5668 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5669
5670 if (channel == NULL) {
5671 LOGW("invalid channel pointer for stream");
5672 continue;
5673 }
5674
5675 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5676 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5677 output.buffer, request->input_buffer, frameNumber);
5678 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005679 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005680 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5681 if (rc < 0) {
5682 LOGE("Fail to request on picture channel");
5683 pthread_mutex_unlock(&mMutex);
5684 return rc;
5685 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005686 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005687 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5688 assert(NULL != mDepthChannel);
5689 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005690
Emilian Peev7650c122017-01-19 08:24:33 -08005691 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5692 if (rc < 0) {
5693 LOGE("Fail to map on depth buffer");
5694 pthread_mutex_unlock(&mMutex);
5695 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005696 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005697 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005698 } else {
5699 LOGD("snapshot request with buffer %p, frame_number %d",
5700 output.buffer, frameNumber);
5701 if (!request->settings) {
5702 rc = channel->request(output.buffer, frameNumber,
5703 NULL, mPrevParameters, indexUsed);
5704 } else {
5705 rc = channel->request(output.buffer, frameNumber,
5706 NULL, mParameters, indexUsed);
5707 }
5708 if (rc < 0) {
5709 LOGE("Fail to request on picture channel");
5710 pthread_mutex_unlock(&mMutex);
5711 return rc;
5712 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005713
Emilian Peev7650c122017-01-19 08:24:33 -08005714 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5715 uint32_t j = 0;
5716 for (j = 0; j < streamsArray.num_streams; j++) {
5717 if (streamsArray.stream_request[j].streamID == streamId) {
5718 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5719 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5720 else
5721 streamsArray.stream_request[j].buf_index = indexUsed;
5722 break;
5723 }
5724 }
5725 if (j == streamsArray.num_streams) {
5726 LOGE("Did not find matching stream to update index");
5727 assert(0);
5728 }
5729
5730 pendingBufferIter->need_metadata = true;
5731 streams_need_metadata++;
5732 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005733 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005734 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5735 bool needMetadata = false;
5736 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5737 rc = yuvChannel->request(output.buffer, frameNumber,
5738 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5739 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005740 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005741 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005742 pthread_mutex_unlock(&mMutex);
5743 return rc;
5744 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005745
5746 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5747 uint32_t j = 0;
5748 for (j = 0; j < streamsArray.num_streams; j++) {
5749 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005750 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5751 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5752 else
5753 streamsArray.stream_request[j].buf_index = indexUsed;
5754 break;
5755 }
5756 }
5757 if (j == streamsArray.num_streams) {
5758 LOGE("Did not find matching stream to update index");
5759 assert(0);
5760 }
5761
5762 pendingBufferIter->need_metadata = needMetadata;
5763 if (needMetadata)
5764 streams_need_metadata += 1;
5765 LOGD("calling YUV channel request, need_metadata is %d",
5766 needMetadata);
5767 } else {
5768 LOGD("request with buffer %p, frame_number %d",
5769 output.buffer, frameNumber);
5770
5771 rc = channel->request(output.buffer, frameNumber, indexUsed);
5772
5773 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5774 uint32_t j = 0;
5775 for (j = 0; j < streamsArray.num_streams; j++) {
5776 if (streamsArray.stream_request[j].streamID == streamId) {
5777 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5778 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5779 else
5780 streamsArray.stream_request[j].buf_index = indexUsed;
5781 break;
5782 }
5783 }
5784 if (j == streamsArray.num_streams) {
5785 LOGE("Did not find matching stream to update index");
5786 assert(0);
5787 }
5788
5789 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5790 && mBatchSize) {
5791 mToBeQueuedVidBufs++;
5792 if (mToBeQueuedVidBufs == mBatchSize) {
5793 channel->queueBatchBuf();
5794 }
5795 }
5796 if (rc < 0) {
5797 LOGE("request failed");
5798 pthread_mutex_unlock(&mMutex);
5799 return rc;
5800 }
5801 }
5802 pendingBufferIter++;
5803 }
5804
5805 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5806 itr++) {
5807 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5808
5809 if (channel == NULL) {
5810 LOGE("invalid channel pointer for stream");
5811 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005812 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005813 return BAD_VALUE;
5814 }
5815
5816 InternalRequest requestedStream;
5817 requestedStream = (*itr);
5818
5819
5820 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5821 LOGD("snapshot request internally input buffer %p, frame_number %d",
5822 request->input_buffer, frameNumber);
5823 if(request->input_buffer != NULL){
5824 rc = channel->request(NULL, frameNumber,
5825 pInputBuffer, &mReprocMeta, indexUsed, true,
5826 requestedStream.meteringOnly);
5827 if (rc < 0) {
5828 LOGE("Fail to request on picture channel");
5829 pthread_mutex_unlock(&mMutex);
5830 return rc;
5831 }
5832 } else {
5833 LOGD("snapshot request with frame_number %d", frameNumber);
5834 if (!request->settings) {
5835 rc = channel->request(NULL, frameNumber,
5836 NULL, mPrevParameters, indexUsed, true,
5837 requestedStream.meteringOnly);
5838 } else {
5839 rc = channel->request(NULL, frameNumber,
5840 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5841 }
5842 if (rc < 0) {
5843 LOGE("Fail to request on picture channel");
5844 pthread_mutex_unlock(&mMutex);
5845 return rc;
5846 }
5847
5848 if ((*itr).meteringOnly != 1) {
5849 requestedStream.need_metadata = 1;
5850 streams_need_metadata++;
5851 }
5852 }
5853
5854 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5855 uint32_t j = 0;
5856 for (j = 0; j < streamsArray.num_streams; j++) {
5857 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005858 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5859 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5860 else
5861 streamsArray.stream_request[j].buf_index = indexUsed;
5862 break;
5863 }
5864 }
5865 if (j == streamsArray.num_streams) {
5866 LOGE("Did not find matching stream to update index");
5867 assert(0);
5868 }
5869
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005870 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005871 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005872 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005873 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005874 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005875 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005876 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005877 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005878
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005879 //If 2 streams have need_metadata set to true, fail the request, unless
5880 //we copy/reference count the metadata buffer
5881 if (streams_need_metadata > 1) {
5882            LOGE("not supporting request in which two streams require"
5883                    " 2 HAL metadata buffers for reprocessing");
5884 pthread_mutex_unlock(&mMutex);
5885 return -EINVAL;
5886 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005887
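        // Decide whether the sensor should produce PD (phase-detection) data for this
        // frame: with a depth stream configured the default is to skip PD data unless
        // the request explicitly enables it; the last choice is cached in mDepthCloudMode.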
Emilian Peev656e4fa2017-06-02 16:47:04 +01005888 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5889 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5890 if (depthRequestPresent && mDepthChannel) {
5891 if (request->settings) {
5892 camera_metadata_ro_entry entry;
5893 if (find_camera_metadata_ro_entry(request->settings,
5894 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5895 if (entry.data.u8[0]) {
5896 pdafEnable = CAM_PD_DATA_ENABLED;
5897 } else {
5898 pdafEnable = CAM_PD_DATA_SKIP;
5899 }
5900 mDepthCloudMode = pdafEnable;
5901 } else {
5902 pdafEnable = mDepthCloudMode;
5903 }
5904 } else {
5905 pdafEnable = mDepthCloudMode;
5906 }
5907 }
5908
Emilian Peev7650c122017-01-19 08:24:33 -08005909 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5910 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5911 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5912 pthread_mutex_unlock(&mMutex);
5913 return BAD_VALUE;
5914 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005915
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005916 if (request->input_buffer == NULL) {
5917 /* Set the parameters to backend:
5918 * - For every request in NORMAL MODE
5919 * - For every request in HFR mode during preview only case
5920 * - Once every batch in HFR mode during video recording
5921 */
5922 if (!mBatchSize ||
5923 (mBatchSize && !isVidBufRequested) ||
5924 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5925 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5926 mBatchSize, isVidBufRequested,
5927 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005928
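            // For HFR batch mode, merge this request's streams into the accumulated
            // mBatchedStreamsArray and send the merged set, so the backend sees every
            // stream requested during the batch in a single set_parms call.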
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005929 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5930 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5931 uint32_t m = 0;
5932 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5933 if (streamsArray.stream_request[k].streamID ==
5934 mBatchedStreamsArray.stream_request[m].streamID)
5935 break;
5936 }
5937 if (m == mBatchedStreamsArray.num_streams) {
5938                        mBatchedStreamsArray.stream_request
5939                            [mBatchedStreamsArray.num_streams].streamID =
5940                                streamsArray.stream_request[k].streamID;
5941                        mBatchedStreamsArray.stream_request
5942                            [mBatchedStreamsArray.num_streams].buf_index =
5943                                streamsArray.stream_request[k].buf_index;
5944 mBatchedStreamsArray.num_streams =
5945 mBatchedStreamsArray.num_streams + 1;
5946 }
5947 }
5948 streamsArray = mBatchedStreamsArray;
5949 }
5950 /* Update stream id of all the requested buffers */
5951 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5952 streamsArray)) {
5953 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005954 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005955 return BAD_VALUE;
5956 }
5957
5958 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5959 mParameters);
5960 if (rc < 0) {
5961 LOGE("set_parms failed");
5962 }
5963            /* reset to zero because the batch is queued */
5964 mToBeQueuedVidBufs = 0;
5965 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5966 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5967 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005968 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5969 uint32_t m = 0;
5970 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5971 if (streamsArray.stream_request[k].streamID ==
5972 mBatchedStreamsArray.stream_request[m].streamID)
5973 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005974 }
5975 if (m == mBatchedStreamsArray.num_streams) {
5976 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5977 streamID = streamsArray.stream_request[k].streamID;
5978 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5979 buf_index = streamsArray.stream_request[k].buf_index;
5980 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5981 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005982 }
5983 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005984 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005985
5986 // Start all streams after the first setting is sent, so that the
5987 // setting can be applied sooner: (0 + apply_delay)th frame.
5988 if (mState == CONFIGURED && mChannelHandle) {
5989 //Then start them.
5990 LOGH("Start META Channel");
5991 rc = mMetadataChannel->start();
5992 if (rc < 0) {
5993 LOGE("META channel start failed");
5994 pthread_mutex_unlock(&mMutex);
5995 return rc;
5996 }
5997
5998 if (mAnalysisChannel) {
5999 rc = mAnalysisChannel->start();
6000 if (rc < 0) {
6001 LOGE("Analysis channel start failed");
6002 mMetadataChannel->stop();
6003 pthread_mutex_unlock(&mMutex);
6004 return rc;
6005 }
6006 }
6007
6008 if (mSupportChannel) {
6009 rc = mSupportChannel->start();
6010 if (rc < 0) {
6011 LOGE("Support channel start failed");
6012 mMetadataChannel->stop();
6013                    /* Although support and analysis are mutually exclusive today,
6014                       adding it in any case for future proofing */
6015 if (mAnalysisChannel) {
6016 mAnalysisChannel->stop();
6017 }
6018 pthread_mutex_unlock(&mMutex);
6019 return rc;
6020 }
6021 }
6022 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6023 it != mStreamInfo.end(); it++) {
6024 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6025 LOGH("Start Processing Channel mask=%d",
6026 channel->getStreamTypeMask());
6027 rc = channel->start();
6028 if (rc < 0) {
6029 LOGE("channel start failed");
6030 pthread_mutex_unlock(&mMutex);
6031 return rc;
6032 }
6033 }
6034
6035 if (mRawDumpChannel) {
6036 LOGD("Starting raw dump stream");
6037 rc = mRawDumpChannel->start();
6038 if (rc != NO_ERROR) {
6039 LOGE("Error Starting Raw Dump Channel");
6040 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6041 it != mStreamInfo.end(); it++) {
6042 QCamera3Channel *channel =
6043 (QCamera3Channel *)(*it)->stream->priv;
6044 LOGH("Stopping Processing Channel mask=%d",
6045 channel->getStreamTypeMask());
6046 channel->stop();
6047 }
6048 if (mSupportChannel)
6049 mSupportChannel->stop();
6050 if (mAnalysisChannel) {
6051 mAnalysisChannel->stop();
6052 }
6053 mMetadataChannel->stop();
6054 pthread_mutex_unlock(&mMutex);
6055 return rc;
6056 }
6057 }
6058
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006059 // Configure modules for stream on.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006060 rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006061 mChannelHandle, /*start_sensor_streaming*/false);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006062 if (rc != NO_ERROR) {
6063 LOGE("start_channel failed %d", rc);
6064 pthread_mutex_unlock(&mMutex);
6065 return rc;
6066 }
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006067
6068 {
6069 // Configure Easel for stream on.
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006070 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen605c3872017-06-14 11:09:23 -07006071
6072 // Now that sensor mode should have been selected, get the selected sensor mode
6073 // info.
6074 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6075 getCurrentSensorModeInfo(mSensorModeInfo);
6076
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006077 if (EaselManagerClientOpened) {
6078 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006079 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6080 /*enableCapture*/true);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006081 if (rc != OK) {
6082 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6083 mCameraId, mSensorModeInfo.op_pixel_clk);
6084 pthread_mutex_unlock(&mMutex);
6085 return rc;
6086 }
Chien-Yu Chene96475e2017-04-11 11:53:26 -07006087 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006088 }
6089 }
6090
6091 // Start sensor streaming.
6092 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6093 mChannelHandle);
6094 if (rc != NO_ERROR) {
6095                    LOGE("start_sensor_streaming failed %d", rc);
6096 pthread_mutex_unlock(&mMutex);
6097 return rc;
6098 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006099 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006100 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006101 }
6102
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006103 // Enable HDR+ mode for the first PREVIEW_INTENT request.
Chenjie Luo4a761802017-06-13 17:35:54 +00006104 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006105 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006106 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006107 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6108 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6109 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
6110 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006111
6112 if (isSessionHdrPlusModeCompatible()) {
6113 rc = enableHdrPlusModeLocked();
6114 if (rc != OK) {
6115 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6116 pthread_mutex_unlock(&mMutex);
6117 return rc;
6118 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006119 }
6120
6121 mFirstPreviewIntentSeen = true;
6122 }
6123 }
6124
Thierry Strudel3d639192016-09-09 11:52:26 -07006125 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6126
6127 mState = STARTED;
6128 // Added a timed condition wait
6129 struct timespec ts;
6130 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006131 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006132 if (rc < 0) {
6133 isValidTimeout = 0;
6134        LOGE("Error reading the monotonic clock!!");
6135 }
6136 else {
6137        // Make the timeout 5 sec for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006138 int64_t timeout = 5;
6139 {
6140 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6141 // If there is a pending HDR+ request, the following requests may be blocked until the
6142 // HDR+ request is done. So allow a longer timeout.
6143 if (mHdrPlusPendingRequests.size() > 0) {
6144 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6145 }
6146 }
6147 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006148 }
6149 //Block on conditional variable
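    // Throttle the framework: block while the number of in-flight requests is at least
    // mMinInFlightRequests, until results arrive (or the daemon wakes us up) or the
    // timed wait expires. Requests with an input buffer are not throttled.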
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006150 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006151 (mState != ERROR) && (mState != DEINIT)) {
6152 if (!isValidTimeout) {
6153 LOGD("Blocking on conditional wait");
6154 pthread_cond_wait(&mRequestCond, &mMutex);
6155 }
6156 else {
6157 LOGD("Blocking on timed conditional wait");
6158 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6159 if (rc == ETIMEDOUT) {
6160 rc = -ENODEV;
6161 LOGE("Unblocked on timeout!!!!");
6162 break;
6163 }
6164 }
6165 LOGD("Unblocked");
6166 if (mWokenUpByDaemon) {
6167 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006168 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006169 break;
6170 }
6171 }
6172 pthread_mutex_unlock(&mMutex);
6173
6174 return rc;
6175}
6176
6177/*===========================================================================
6178 * FUNCTION : dump
6179 *
6180 * DESCRIPTION: Dumps HAL state (pending requests, pending buffers and
6181 *              pending frame drops) to the given file descriptor.
6182 * PARAMETERS :
6183 *   @fd : file descriptor to write the dump to
6184 *
6185 * RETURN     : None
6186 *==========================================================================*/
6187void QCamera3HardwareInterface::dump(int fd)
6188{
6189 pthread_mutex_lock(&mMutex);
6190 dprintf(fd, "\n Camera HAL3 information Begin \n");
6191
6192 dprintf(fd, "\nNumber of pending requests: %zu \n",
6193 mPendingRequestsList.size());
6194 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6195 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6196 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6197 for(pendingRequestIterator i = mPendingRequestsList.begin();
6198 i != mPendingRequestsList.end(); i++) {
6199 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6200 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6201 i->input_buffer);
6202 }
6203 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6204 mPendingBuffersMap.get_num_overall_buffers());
6205 dprintf(fd, "-------+------------------\n");
6206 dprintf(fd, " Frame | Stream type mask \n");
6207 dprintf(fd, "-------+------------------\n");
6208 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6209 for(auto &j : req.mPendingBufferList) {
6210 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6211 dprintf(fd, " %5d | %11d \n",
6212 req.frame_number, channel->getStreamTypeMask());
6213 }
6214 }
6215 dprintf(fd, "-------+------------------\n");
6216
6217 dprintf(fd, "\nPending frame drop list: %zu\n",
6218 mPendingFrameDropList.size());
6219 dprintf(fd, "-------+-----------\n");
6220 dprintf(fd, " Frame | Stream ID \n");
6221 dprintf(fd, "-------+-----------\n");
6222 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6223 i != mPendingFrameDropList.end(); i++) {
6224 dprintf(fd, " %5d | %9d \n",
6225 i->frame_number, i->stream_ID);
6226 }
6227 dprintf(fd, "-------+-----------\n");
6228
6229 dprintf(fd, "\n Camera HAL3 information End \n");
6230
6231 /* use dumpsys media.camera as trigger to send update debug level event */
6232 mUpdateDebugLevel = true;
6233 pthread_mutex_unlock(&mMutex);
6234 return;
6235}
6236
6237/*===========================================================================
6238 * FUNCTION : flush
6239 *
6240 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6241 * conditionally restarts channels
6242 *
6243 * PARAMETERS :
6244 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006245 * @ stopChannelImmediately: stop the channel immediately. This should be used
6246 *                          when the device has encountered an error and MIPI may
6247 *                          have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006248 *
6249 * RETURN :
6250 * 0 on success
6251 * Error code on failure
6252 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006253int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006254{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006255 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006256 int32_t rc = NO_ERROR;
6257
6258 LOGD("Unblocking Process Capture Request");
6259 pthread_mutex_lock(&mMutex);
6260 mFlush = true;
6261 pthread_mutex_unlock(&mMutex);
6262
6263 rc = stopAllChannels();
6264 // unlink of dualcam
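    // Send the bundle info command with sync control OFF so the backend breaks the
    // related-sensor link before the session is torn down.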
6265 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006266 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6267 &m_pDualCamCmdPtr->bundle_info;
6268 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006269 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6270 pthread_mutex_lock(&gCamLock);
6271
6272 if (mIsMainCamera == 1) {
6273 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6274 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006275 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006276 // related session id should be session id of linked session
6277 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6278 } else {
6279 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6280 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006281 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006282 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6283 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006284 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006285 pthread_mutex_unlock(&gCamLock);
6286
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006287 rc = mCameraHandle->ops->set_dual_cam_cmd(
6288 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006289 if (rc < 0) {
6290 LOGE("Dualcam: Unlink failed, but still proceed to close");
6291 }
6292 }
6293
6294 if (rc < 0) {
6295 LOGE("stopAllChannels failed");
6296 return rc;
6297 }
6298 if (mChannelHandle) {
6299 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006300 mChannelHandle, stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006301 }
6302
6303 // Reset bundle info
6304 rc = setBundleInfo();
6305 if (rc < 0) {
6306 LOGE("setBundleInfo failed %d", rc);
6307 return rc;
6308 }
6309
6310 // Mutex Lock
6311 pthread_mutex_lock(&mMutex);
6312
6313 // Unblock process_capture_request
6314 mPendingLiveRequest = 0;
6315 pthread_cond_signal(&mRequestCond);
6316
6317 rc = notifyErrorForPendingRequests();
6318 if (rc < 0) {
6319 LOGE("notifyErrorForPendingRequests failed");
6320 pthread_mutex_unlock(&mMutex);
6321 return rc;
6322 }
6323
6324 mFlush = false;
6325
6326 // Start the Streams/Channels
6327 if (restartChannels) {
6328 rc = startAllChannels();
6329 if (rc < 0) {
6330 LOGE("startAllChannels failed");
6331 pthread_mutex_unlock(&mMutex);
6332 return rc;
6333 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006334 if (mChannelHandle) {
6335 mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006336 mChannelHandle, /*start_sensor_streaming*/true);
Thierry Strudel2896d122017-02-23 19:18:03 -08006337 if (rc < 0) {
6338 LOGE("start_channel failed");
6339 pthread_mutex_unlock(&mMutex);
6340 return rc;
6341 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006342 }
6343 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006344 pthread_mutex_unlock(&mMutex);
6345
6346 return 0;
6347}
6348
6349/*===========================================================================
6350 * FUNCTION : flushPerf
6351 *
6352 * DESCRIPTION: This is the performance optimization version of flush that does
6353 *              not use stream off; rather, it flushes the system
6354 *
6355 * PARAMETERS :
6356 *
6357 *
6358 * RETURN : 0 : success
6359 * -EINVAL: input is malformed (device is not valid)
6360 * -ENODEV: if the device has encountered a serious error
6361 *==========================================================================*/
6362int QCamera3HardwareInterface::flushPerf()
6363{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006364 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006365 int32_t rc = 0;
6366 struct timespec timeout;
6367 bool timed_wait = false;
6368
6369 pthread_mutex_lock(&mMutex);
6370 mFlushPerf = true;
6371 mPendingBuffersMap.numPendingBufsAtFlush =
6372 mPendingBuffersMap.get_num_overall_buffers();
6373 LOGD("Calling flush. Wait for %d buffers to return",
6374 mPendingBuffersMap.numPendingBufsAtFlush);
6375
6376 /* send the flush event to the backend */
6377 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6378 if (rc < 0) {
6379 LOGE("Error in flush: IOCTL failure");
6380 mFlushPerf = false;
6381 pthread_mutex_unlock(&mMutex);
6382 return -ENODEV;
6383 }
6384
6385 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6386 LOGD("No pending buffers in HAL, return flush");
6387 mFlushPerf = false;
6388 pthread_mutex_unlock(&mMutex);
6389 return rc;
6390 }
6391
6392 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006393 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006394 if (rc < 0) {
6395        LOGE("Error reading the monotonic clock, cannot use timed wait");
6396 } else {
6397 timeout.tv_sec += FLUSH_TIMEOUT;
6398 timed_wait = true;
6399 }
6400
6401 //Block on conditional variable
6402 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6403 LOGD("Waiting on mBuffersCond");
6404 if (!timed_wait) {
6405 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6406 if (rc != 0) {
6407 LOGE("pthread_cond_wait failed due to rc = %s",
6408 strerror(rc));
6409 break;
6410 }
6411 } else {
6412 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6413 if (rc != 0) {
6414 LOGE("pthread_cond_timedwait failed due to rc = %s",
6415 strerror(rc));
6416 break;
6417 }
6418 }
6419 }
6420 if (rc != 0) {
6421 mFlushPerf = false;
6422 pthread_mutex_unlock(&mMutex);
6423 return -ENODEV;
6424 }
6425
6426 LOGD("Received buffers, now safe to return them");
6427
6428 //make sure the channels handle flush
6429 //currently only required for the picture channel to release snapshot resources
6430 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6431 it != mStreamInfo.end(); it++) {
6432 QCamera3Channel *channel = (*it)->channel;
6433 if (channel) {
6434 rc = channel->flush();
6435 if (rc) {
6436 LOGE("Flushing the channels failed with error %d", rc);
6437 // even though the channel flush failed we need to continue and
6438 // return the buffers we have to the framework, however the return
6439 // value will be an error
6440 rc = -ENODEV;
6441 }
6442 }
6443 }
6444
6445 /* notify the frameworks and send errored results */
6446 rc = notifyErrorForPendingRequests();
6447 if (rc < 0) {
6448 LOGE("notifyErrorForPendingRequests failed");
6449 pthread_mutex_unlock(&mMutex);
6450 return rc;
6451 }
6452
6453 //unblock process_capture_request
6454 mPendingLiveRequest = 0;
6455 unblockRequestIfNecessary();
6456
6457 mFlushPerf = false;
6458 pthread_mutex_unlock(&mMutex);
6459 LOGD ("Flush Operation complete. rc = %d", rc);
6460 return rc;
6461}
6462
6463/*===========================================================================
6464 * FUNCTION : handleCameraDeviceError
6465 *
 6466 * DESCRIPTION: This function performs an internal flush, notifies the
 6467 *              framework of the error, and updates the state variable.
6468 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006469 * PARAMETERS :
6470 * @stopChannelImmediately : stop channels immediately without waiting for
6471 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006472 *
6473 * RETURN : NO_ERROR on Success
6474 * Error code on failure
6475 *==========================================================================*/
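//
// Illustrative sketch (assumed call pattern, not taken verbatim from this
// file): the caller is expected to have recorded the ERROR state under mMutex
// before invoking this routine, otherwise it returns early with NO_ERROR:
//
//     pthread_mutex_lock(&mMutex);
//     mState = ERROR;
//     pthread_mutex_unlock(&mMutex);
//     handleCameraDeviceError(true /*stopChannelImmediately*/);
//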
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006476int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006477{
6478 int32_t rc = NO_ERROR;
6479
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006480 {
6481 Mutex::Autolock lock(mFlushLock);
6482 pthread_mutex_lock(&mMutex);
6483 if (mState != ERROR) {
6484 //if mState != ERROR, nothing to be done
6485 pthread_mutex_unlock(&mMutex);
6486 return NO_ERROR;
6487 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006488 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006489
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006490 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006491 if (NO_ERROR != rc) {
6492 LOGE("internal flush to handle mState = ERROR failed");
6493 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006494
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006495 pthread_mutex_lock(&mMutex);
6496 mState = DEINIT;
6497 pthread_mutex_unlock(&mMutex);
6498 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006499
6500 camera3_notify_msg_t notify_msg;
6501 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6502 notify_msg.type = CAMERA3_MSG_ERROR;
6503 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6504 notify_msg.message.error.error_stream = NULL;
6505 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006506 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006507
6508 return rc;
6509}
6510
6511/*===========================================================================
6512 * FUNCTION : captureResultCb
6513 *
 6514 * DESCRIPTION: Callback handler for all capture results
 6515 *              (stream buffers as well as metadata)
6516 *
6517 * PARAMETERS :
6518 * @metadata : metadata information
6519 * @buffer : actual gralloc buffer to be returned to frameworks.
 6520 *          NULL if this callback carries only metadata.
 *   @frame_number : frame number of the request this result belongs to
 *   @isInputBuffer : true if the result is for the input (reprocess) buffer
6521 *
6522 * RETURN : NONE
6523 *==========================================================================*/
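//
// Dispatch summary (derived from the body below): a non-NULL metadata_buf is
// routed to the metadata path (batched or per-frame), isInputBuffer routes to
// the input/reprocess buffer path, and anything else is treated as a regular
// output stream buffer. Each path takes mMutex before touching shared state.
//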
6524void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6525 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6526{
6527 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006528 pthread_mutex_lock(&mMutex);
6529 uint8_t batchSize = mBatchSize;
6530 pthread_mutex_unlock(&mMutex);
6531 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006532 handleBatchMetadata(metadata_buf,
6533 true /* free_and_bufdone_meta_buf */);
6534 } else { /* mBatchSize = 0 */
6535 hdrPlusPerfLock(metadata_buf);
6536 pthread_mutex_lock(&mMutex);
6537 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006538 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006539 true /* last urgent frame of batch metadata */,
6540 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006541 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006542 pthread_mutex_unlock(&mMutex);
6543 }
6544 } else if (isInputBuffer) {
6545 pthread_mutex_lock(&mMutex);
6546 handleInputBufferWithLock(frame_number);
6547 pthread_mutex_unlock(&mMutex);
6548 } else {
6549 pthread_mutex_lock(&mMutex);
6550 handleBufferWithLock(buffer, frame_number);
6551 pthread_mutex_unlock(&mMutex);
6552 }
6553 return;
6554}
6555
6556/*===========================================================================
6557 * FUNCTION : getReprocessibleOutputStreamId
6558 *
6559 * DESCRIPTION: Get source output stream id for the input reprocess stream
6560 * based on size and format, which would be the largest
6561 * output stream if an input stream exists.
6562 *
6563 * PARAMETERS :
6564 * @id : return the stream id if found
6565 *
6566 * RETURN : int32_t type of status
6567 * NO_ERROR -- success
 6568 *              non-zero failure code
6569 *==========================================================================*/
6570int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6571{
 6572 /* check if there is any output or bidirectional stream with the same size and
 6573    format, and return that stream */
6574 if ((mInputStreamInfo.dim.width > 0) &&
6575 (mInputStreamInfo.dim.height > 0)) {
6576 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6577 it != mStreamInfo.end(); it++) {
6578
6579 camera3_stream_t *stream = (*it)->stream;
6580 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6581 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6582 (stream->format == mInputStreamInfo.format)) {
6583 // Usage flag for an input stream and the source output stream
6584 // may be different.
6585 LOGD("Found reprocessible output stream! %p", *it);
6586 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6587 stream->usage, mInputStreamInfo.usage);
6588
6589 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6590 if (channel != NULL && channel->mStreams[0]) {
6591 id = channel->mStreams[0]->getMyServerID();
6592 return NO_ERROR;
6593 }
6594 }
6595 }
6596 } else {
6597 LOGD("No input stream, so no reprocessible output stream");
6598 }
6599 return NAME_NOT_FOUND;
6600}
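//
// Illustrative usage (sketch only): a caller setting up reprocessing would
// first look up the matching source stream, e.g.
//
//     uint32_t srcStreamId = 0;
//     if (NO_ERROR == getReprocessibleOutputStreamId(srcStreamId)) {
//         // wire the offline/reprocess channel to srcStreamId
//     }
//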
6601
6602/*===========================================================================
6603 * FUNCTION : lookupFwkName
6604 *
 6605 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
 6606 *              make sure the parameter is correctly propagated
6607 *
6608 * PARAMETERS :
6609 * @arr : map between the two enums
6610 * @len : len of the map
6611 * @hal_name : name of the hal_parm to map
6612 *
6613 * RETURN : int type of status
6614 * fwk_name -- success
 6615 *              non-zero failure code
6616 *==========================================================================*/
6617template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6618 size_t len, halType hal_name)
6619{
6620
6621 for (size_t i = 0; i < len; i++) {
6622 if (arr[i].hal_name == hal_name) {
6623 return arr[i].fwk_name;
6624 }
6625 }
6626
6627 /* Not able to find matching framework type is not necessarily
6628 * an error case. This happens when mm-camera supports more attributes
6629 * than the frameworks do */
6630 LOGH("Cannot find matching framework type");
6631 return NAME_NOT_FOUND;
6632}
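//
// Typical usage (the same pattern appears later in this file when translating
// the backend flash mode): map the backend enum and skip the update when no
// framework equivalent exists.
//
//     int val = lookupFwkName(FLASH_MODES_MAP,
//             METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
//     if (NAME_NOT_FOUND != val) {
//         uint8_t fwk_flashMode = (uint8_t)val;
//         camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
//     }
//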
6633
6634/*===========================================================================
6635 * FUNCTION : lookupHalName
6636 *
 6637 * DESCRIPTION: In case the enum is not the same in the framework and the backend,
 6638 *              make sure the parameter is correctly propagated
6639 *
6640 * PARAMETERS :
6641 * @arr : map between the two enums
6642 * @len : len of the map
 6643 * @fwk_name : name of the fwk_parm to map
6644 *
6645 * RETURN : int32_t type of status
6646 * hal_name -- success
 6647 *              non-zero failure code
6648 *==========================================================================*/
6649template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6650 size_t len, fwkType fwk_name)
6651{
6652 for (size_t i = 0; i < len; i++) {
6653 if (arr[i].fwk_name == fwk_name) {
6654 return arr[i].hal_name;
6655 }
6656 }
6657
6658 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6659 return NAME_NOT_FOUND;
6660}
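//
// Sketch of the reverse direction (the concrete call sites live in the
// request-translation path, which is outside this excerpt): map a framework
// enum to the backend value before programming the parameter.
//
//     int32_t halEffect = lookupHalName(EFFECT_MODES_MAP,
//             METADATA_MAP_SIZE(EFFECT_MODES_MAP), fwk_effectMode);
//     if (NAME_NOT_FOUND != halEffect) {
//         // program CAM_INTF_PARM_EFFECT with halEffect
//     }
//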
6661
6662/*===========================================================================
6663 * FUNCTION : lookupProp
6664 *
6665 * DESCRIPTION: lookup a value by its name
6666 *
6667 * PARAMETERS :
6668 * @arr : map between the two enums
6669 * @len : size of the map
6670 * @name : name to be looked up
6671 *
6672 * RETURN : Value if found
6673 * CAM_CDS_MODE_MAX if not found
6674 *==========================================================================*/
6675template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6676 size_t len, const char *name)
6677{
6678 if (name) {
6679 for (size_t i = 0; i < len; i++) {
6680 if (!strcmp(arr[i].desc, name)) {
6681 return arr[i].val;
6682 }
6683 }
6684 }
6685 return CAM_CDS_MODE_MAX;
6686}
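//
// Illustrative usage (the property name and map name are assumptions for this
// sketch): translate a string read from a system property into the CDS enum,
// falling back when the string is not recognized.
//
//     char prop[PROPERTY_VALUE_MAX];
//     property_get("persist.camera.CDS", prop, "Auto");
//     cam_cds_mode_type_t cds =
//             lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
//     if (CAM_CDS_MODE_MAX == cds) {
//         // unrecognized value; keep the current/default CDS mode
//     }
//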
6687
6688/*===========================================================================
 6689 * FUNCTION   : translateFromHalMetadata
 *
 6690 * DESCRIPTION: Translate HAL/backend metadata into the framework
 *              (camera_metadata_t) representation returned in capture results
6691 *
6692 * PARAMETERS :
6693 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006694 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006695 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006696 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6697 * in a batch. Always true for non-batch mode.
Thierry Strudel3d639192016-09-09 11:52:26 -07006698 *
6699 * RETURN : camera_metadata_t*
6700 * metadata in a format specified by fwk
6701 *==========================================================================*/
6702camera_metadata_t*
6703QCamera3HardwareInterface::translateFromHalMetadata(
6704 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006705 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006706 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006707 bool lastMetadataInBatch,
6708 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006709{
6710 CameraMetadata camMetadata;
6711 camera_metadata_t *resultMetadata;
6712
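    // Note on the pattern used throughout this function: IF_META_AVAILABLE()
    // (from the camera interface headers) tests whether the given tag is valid
    // in 'metadata' and, when it is, exposes a typed pointer to the payload for
    // the block that follows. Each block below therefore updates camMetadata
    // only for tags the backend actually reported.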
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006713 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006714 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6715 * Timestamp is needed because it's used for shutter notify calculation.
6716 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006717 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006718 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006719 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006720 }
6721
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006722 if (pendingRequest.jpegMetadata.entryCount())
6723 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006724
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006725 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6726 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6727 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6728 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6729 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006730 if (mBatchSize == 0) {
6731 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006732 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006733 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006734
Samuel Ha68ba5172016-12-15 18:41:12 -08006735 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
 6736 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006737 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006738 // DevCamDebug metadata translateFromHalMetadata AF
6739 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6740 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6741 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6742 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6743 }
6744 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
6745 CAM_INTF_META_DEV_CAM_AF_TOF_CONFIDENCE, metadata) {
6746 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6747 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6748 }
6749 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
6750 CAM_INTF_META_DEV_CAM_AF_TOF_DISTANCE, metadata) {
6751 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6752 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6753 }
6754 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6755 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6756 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6757 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6758 }
6759 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6760 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6761 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6762 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6763 }
6764 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6765 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6766 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6767 *DevCamDebug_af_monitor_pdaf_target_pos;
6768 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6769 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6770 }
6771 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6772 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6773 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6774 *DevCamDebug_af_monitor_pdaf_confidence;
6775 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6776 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6777 }
6778 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6779 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6780 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6781 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6782 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6783 }
6784 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6785 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6786 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6787 *DevCamDebug_af_monitor_tof_target_pos;
6788 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6789 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6790 }
6791 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6792 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6793 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6794 *DevCamDebug_af_monitor_tof_confidence;
6795 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6796 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6797 }
6798 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6799 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6800 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6801 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6802 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6803 }
6804 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6805 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6806 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6807 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6808 &fwk_DevCamDebug_af_monitor_type_select, 1);
6809 }
6810 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6811 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6812 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6813 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6814 &fwk_DevCamDebug_af_monitor_refocus, 1);
6815 }
6816 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6817 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6818 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6819 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6820 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6821 }
6822 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6823 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6824 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6825 *DevCamDebug_af_search_pdaf_target_pos;
6826 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6827 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6828 }
6829 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6830 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6831 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6832 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6833 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6834 }
6835 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6836 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6837 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6838 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6839 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6840 }
6841 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6842 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6843 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6844 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6845 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6846 }
6847 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6848 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6849 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6850 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6851 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6852 }
6853 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6854 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6855 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6856 *DevCamDebug_af_search_tof_target_pos;
6857 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6858 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6859 }
6860 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6861 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6862 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6863 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6864 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6865 }
6866 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6867 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6868 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6869 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6870 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6871 }
6872 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6873 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6874 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6875 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6876 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6877 }
6878 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6879 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6880 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6881 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6882 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6883 }
6884 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6885 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6886 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6887 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6888 &fwk_DevCamDebug_af_search_type_select, 1);
6889 }
6890 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6891 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6892 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6893 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6894 &fwk_DevCamDebug_af_search_next_pos, 1);
6895 }
6896 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6897 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6898 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6899 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6900 &fwk_DevCamDebug_af_search_target_pos, 1);
6901 }
6902 // DevCamDebug metadata translateFromHalMetadata AEC
6903 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6904 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6905 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6906 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6907 }
6908 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6909 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6910 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6911 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6914 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6915 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6916 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6917 }
6918 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6919 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6920 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6921 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6922 }
6923 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6924 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6925 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6926 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6927 }
6928 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6929 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6930 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6931 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6932 }
6933 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6934 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6935 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6936 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6937 }
6938 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6939 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6940 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6941 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6942 }
Samuel Ha34229982017-02-17 13:51:11 -08006943 // DevCamDebug metadata translateFromHalMetadata zzHDR
6944 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6945 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6946 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
6947 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
6948 }
6949 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
6950 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006951 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006952 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
6953 }
6954 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
6955 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
6956 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
6957 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
6958 }
6959 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
6960 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07006961 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08006962 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
6963 }
6964 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
6965 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
6966 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
6967 *DevCamDebug_aec_hdr_sensitivity_ratio;
6968 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
6969 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
6970 }
6971 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
6972 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
6973 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
6974 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
6975 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
6976 }
6977 // DevCamDebug metadata translateFromHalMetadata ADRC
6978 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
6979 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
6980 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
6981 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
6982 &fwk_DevCamDebug_aec_total_drc_gain, 1);
6983 }
6984 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
6985 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
6986 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
6987 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
6988 &fwk_DevCamDebug_aec_color_drc_gain, 1);
6989 }
6990 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
6991 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
6992 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
6993 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
6994 }
6995 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
6996 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
6997 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
6998 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
6999 }
7000 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7001 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7002 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7003 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7004 }
7005 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7006 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7007 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7008 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7009 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007010 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7011 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7012 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7013 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7014 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7015 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7016 }
7017 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7018 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7019 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7020 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7021 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7022 }
7023 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7024 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7025 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7026 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7027 &fwk_DevCamDebug_aec_subject_motion, 1);
7028 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007029 // DevCamDebug metadata translateFromHalMetadata AWB
7030 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7031 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7032 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7033 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7034 }
7035 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7036 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7037 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7038 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7039 }
7040 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7041 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7042 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7043 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7044 }
7045 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7046 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7047 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7048 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7049 }
7050 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7051 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7052 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7053 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7054 }
7055 }
7056 // atrace_end(ATRACE_TAG_ALWAYS);
7057
Thierry Strudel3d639192016-09-09 11:52:26 -07007058 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7059 int64_t fwk_frame_number = *frame_number;
7060 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7061 }
7062
7063 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7064 int32_t fps_range[2];
7065 fps_range[0] = (int32_t)float_range->min_fps;
7066 fps_range[1] = (int32_t)float_range->max_fps;
7067 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7068 fps_range, 2);
7069 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7070 fps_range[0], fps_range[1]);
7071 }
7072
7073 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7074 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7075 }
7076
7077 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7078 int val = (uint8_t)lookupFwkName(SCENE_MODES_MAP,
7079 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7080 *sceneMode);
7081 if (NAME_NOT_FOUND != val) {
7082 uint8_t fwkSceneMode = (uint8_t)val;
7083 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7084 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7085 fwkSceneMode);
7086 }
7087 }
7088
7089 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7090 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7091 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7092 }
7093
7094 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7095 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7096 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7097 }
7098
7099 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7100 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7101 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7102 }
7103
7104 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7105 CAM_INTF_META_EDGE_MODE, metadata) {
7106 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7107 }
7108
7109 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7110 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7111 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7112 }
7113
7114 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7115 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7116 }
7117
7118 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7119 if (0 <= *flashState) {
7120 uint8_t fwk_flashState = (uint8_t) *flashState;
7121 if (!gCamCapability[mCameraId]->flash_available) {
7122 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7123 }
7124 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7125 }
7126 }
7127
7128 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7129 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7130 if (NAME_NOT_FOUND != val) {
7131 uint8_t fwk_flashMode = (uint8_t)val;
7132 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7133 }
7134 }
7135
7136 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7137 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7138 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7139 }
7140
7141 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7142 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7143 }
7144
7145 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7146 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7147 }
7148
7149 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7150 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7151 }
7152
7153 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7154 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7155 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7156 }
7157
7158 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7159 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7160 LOGD("fwk_videoStab = %d", fwk_videoStab);
7161 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7162 } else {
 7163 // Regardless of whether video stabilization is supported, CTS expects the EIS
 7164 // result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7165 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7166 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007167 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007168 }
7169
7170 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7171 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7172 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7173 }
7174
7175 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7176 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7177 }
7178
Thierry Strudel3d639192016-09-09 11:52:26 -07007179 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7180 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007181 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007182
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007183 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7184 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007185
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007186 LOGD("applied dynamicblackLevel in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007187 blackLevelAppliedPattern->cam_black_level[0],
7188 blackLevelAppliedPattern->cam_black_level[1],
7189 blackLevelAppliedPattern->cam_black_level[2],
7190 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007191 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7192 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007193
7194#ifndef USE_HAL_3_3
7195 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307196 // Need to convert the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007197 // depth space.
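        // For example, a dynamic black level of 1024 in the 14-bit domain maps
        // to 1024 / 16 = 64 in the 10-bit domain (a right shift by 4 bits).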
Jason Lee4f3d96e2017-02-28 19:24:14 +05307198 fwk_blackLevelInd[0] /= 16.0;
7199 fwk_blackLevelInd[1] /= 16.0;
7200 fwk_blackLevelInd[2] /= 16.0;
7201 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007202 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7203 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007204#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007205 }
7206
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007207#ifndef USE_HAL_3_3
7208 // Fixed whitelevel is used by ISP/Sensor
7209 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7210 &gCamCapability[mCameraId]->white_level, 1);
7211#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007212
7213 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7214 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7215 int32_t scalerCropRegion[4];
7216 scalerCropRegion[0] = hScalerCropRegion->left;
7217 scalerCropRegion[1] = hScalerCropRegion->top;
7218 scalerCropRegion[2] = hScalerCropRegion->width;
7219 scalerCropRegion[3] = hScalerCropRegion->height;
7220
7221 // Adjust crop region from sensor output coordinate system to active
7222 // array coordinate system.
7223 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7224 scalerCropRegion[2], scalerCropRegion[3]);
7225
7226 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7227 }
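    // Illustrative numbers only (not device data): with a 2104x1560 sensor
    // output derived from a 4208x3120 active array, a sensor-output crop of
    // (100, 100, 1000, 750) maps to roughly (200, 200, 2000, 1500) in
    // active-array coordinates after toActiveArray().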
7228
7229 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7230 LOGD("sensorExpTime = %lld", *sensorExpTime);
7231 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7232 }
7233
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007234 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7235 LOGD("expTimeBoost = %f", *expTimeBoost);
7236 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7237 }
7238
Thierry Strudel3d639192016-09-09 11:52:26 -07007239 IF_META_AVAILABLE(int64_t, sensorFameDuration,
7240 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7241 LOGD("sensorFameDuration = %lld", *sensorFameDuration);
7242 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFameDuration, 1);
7243 }
7244
7245 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7246 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7247 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7248 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7249 sensorRollingShutterSkew, 1);
7250 }
7251
7252 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7253 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7254 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7255
7256 //calculate the noise profile based on sensitivity
7257 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7258 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7259 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7260 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7261 noise_profile[i] = noise_profile_S;
7262 noise_profile[i+1] = noise_profile_O;
7263 }
7264 LOGD("noise model entry (S, O) is (%f, %f)",
7265 noise_profile_S, noise_profile_O);
7266 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7267 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7268 }
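    // Layout note: ANDROID_SENSOR_NOISE_PROFILE carries one (S, O) pair per
    // color channel, so for a typical 4-channel Bayer sensor the array built
    // above is { S, O, S, O, S, O, S, O }.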
7269
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007270#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007271 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007272 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007273 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007274 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007275 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7276 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7277 }
7278 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007279#endif
7280
Thierry Strudel3d639192016-09-09 11:52:26 -07007281 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7282 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7283 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7284 }
7285
7286 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7287 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7288 *faceDetectMode);
7289 if (NAME_NOT_FOUND != val) {
7290 uint8_t fwk_faceDetectMode = (uint8_t)val;
7291 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7292
7293 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7294 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7295 CAM_INTF_META_FACE_DETECTION, metadata) {
7296 uint8_t numFaces = MIN(
7297 faceDetectionInfo->num_faces_detected, MAX_ROI);
7298 int32_t faceIds[MAX_ROI];
7299 uint8_t faceScores[MAX_ROI];
7300 int32_t faceRectangles[MAX_ROI * 4];
7301 int32_t faceLandmarks[MAX_ROI * 6];
7302 size_t j = 0, k = 0;
7303
7304 for (size_t i = 0; i < numFaces; i++) {
7305 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
 7306 // Map the face boundary from the sensor output coordinate system to the
 7307 // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007308 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007309 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7310 rect.width, rect.height);
7311
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007312 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007313
Jason Lee8ce36fa2017-04-19 19:40:37 -07007314 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7315 "bottom-right (%d, %d)",
7316 faceDetectionInfo->frame_id, i,
7317 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7318 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7319
Thierry Strudel3d639192016-09-09 11:52:26 -07007320 j+= 4;
7321 }
7322 if (numFaces <= 0) {
7323 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7324 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7325 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7326 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7327 }
7328
7329 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7330 numFaces);
7331 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7332 faceRectangles, numFaces * 4U);
7333 if (fwk_faceDetectMode ==
7334 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7335 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7336 CAM_INTF_META_FACE_LANDMARK, metadata) {
7337
7338 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007339 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007340 // Map the landmark coordinates from the sensor output coordinate
 7341 // system to the active array coordinate system.
7342 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007343 face_landmarks.left_eye_center.x,
7344 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007345 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007346 face_landmarks.right_eye_center.x,
7347 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007348 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007349 face_landmarks.mouth_center.x,
7350 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007351
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007352 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007353
7354 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7355 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7356 faceDetectionInfo->frame_id, i,
7357 faceLandmarks[k + LEFT_EYE_X],
7358 faceLandmarks[k + LEFT_EYE_Y],
7359 faceLandmarks[k + RIGHT_EYE_X],
7360 faceLandmarks[k + RIGHT_EYE_Y],
7361 faceLandmarks[k + MOUTH_X],
7362 faceLandmarks[k + MOUTH_Y]);
7363
Thierry Strudel04e026f2016-10-10 11:27:36 -07007364 k+= TOTAL_LANDMARK_INDICES;
7365 }
7366 } else {
7367 for (size_t i = 0; i < numFaces; i++) {
7368 setInvalidLandmarks(faceLandmarks+k);
7369 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007370 }
7371 }
7372
Jason Lee49619db2017-04-13 12:07:22 -07007373 for (size_t i = 0; i < numFaces; i++) {
7374 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7375
7376 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7377 faceDetectionInfo->frame_id, i, faceIds[i]);
7378 }
7379
Thierry Strudel3d639192016-09-09 11:52:26 -07007380 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7381 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7382 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007383 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007384 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7385 CAM_INTF_META_FACE_BLINK, metadata) {
7386 uint8_t detected[MAX_ROI];
7387 uint8_t degree[MAX_ROI * 2];
7388 for (size_t i = 0; i < numFaces; i++) {
7389 detected[i] = blinks->blink[i].blink_detected;
7390 degree[2 * i] = blinks->blink[i].left_blink;
7391 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007392
Jason Lee49619db2017-04-13 12:07:22 -07007393 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7394 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7395 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7396 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007397 }
7398 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7399 detected, numFaces);
7400 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7401 degree, numFaces * 2);
7402 }
7403 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7404 CAM_INTF_META_FACE_SMILE, metadata) {
7405 uint8_t degree[MAX_ROI];
7406 uint8_t confidence[MAX_ROI];
7407 for (size_t i = 0; i < numFaces; i++) {
7408 degree[i] = smiles->smile[i].smile_degree;
7409 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007410
Jason Lee49619db2017-04-13 12:07:22 -07007411 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7412 "smile_degree=%d, smile_score=%d",
7413 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007414 }
7415 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7416 degree, numFaces);
7417 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7418 confidence, numFaces);
7419 }
7420 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7421 CAM_INTF_META_FACE_GAZE, metadata) {
7422 int8_t angle[MAX_ROI];
7423 int32_t direction[MAX_ROI * 3];
7424 int8_t degree[MAX_ROI * 2];
7425 for (size_t i = 0; i < numFaces; i++) {
7426 angle[i] = gazes->gaze[i].gaze_angle;
7427 direction[3 * i] = gazes->gaze[i].updown_dir;
7428 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7429 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7430 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7431 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007432
7433 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7434 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7435 "left_right_gaze=%d, top_bottom_gaze=%d",
7436 faceDetectionInfo->frame_id, i, angle[i],
7437 direction[3 * i], direction[3 * i + 1],
7438 direction[3 * i + 2],
7439 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007440 }
7441 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7442 (uint8_t *)angle, numFaces);
7443 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7444 direction, numFaces * 3);
7445 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7446 (uint8_t *)degree, numFaces * 2);
7447 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007448 }
7449 }
7450 }
7451 }
7452
7453 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7454 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007455 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007456 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007457 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007458
Shuzhen Wang14415f52016-11-16 18:26:18 -08007459 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7460 histogramBins = *histBins;
7461 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7462 }
7463
7464 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007465 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7466 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007467 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007468
7469 switch (stats_data->type) {
7470 case CAM_HISTOGRAM_TYPE_BAYER:
7471 switch (stats_data->bayer_stats.data_type) {
7472 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007473 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7474 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007475 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007476 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7477 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007478 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007479 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7480 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007481 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007482 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007483 case CAM_STATS_CHANNEL_R:
7484 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007485 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7486 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007487 }
7488 break;
7489 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007490 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007491 break;
7492 }
7493
Shuzhen Wang14415f52016-11-16 18:26:18 -08007494 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007495 }
7496 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007497 }
7498
7499 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7500 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7501 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7502 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7503 }
7504
7505 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7506 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7507 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7508 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7509 }
7510
7511 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7512 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7513 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7514 CAM_MAX_SHADING_MAP_HEIGHT);
7515 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7516 CAM_MAX_SHADING_MAP_WIDTH);
7517 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7518 lensShadingMap->lens_shading, 4U * map_width * map_height);
7519 }
7520
7521 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7522 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7523 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7524 }
7525
7526 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7527 //Populate CAM_INTF_META_TONEMAP_CURVES
7528 /* ch0 = G, ch 1 = B, ch 2 = R*/
7529 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7530 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7531 tonemap->tonemap_points_cnt,
7532 CAM_MAX_TONEMAP_CURVE_SIZE);
7533 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7534 }
7535
7536 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7537 &tonemap->curves[0].tonemap_points[0][0],
7538 tonemap->tonemap_points_cnt * 2);
7539
7540 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7541 &tonemap->curves[1].tonemap_points[0][0],
7542 tonemap->tonemap_points_cnt * 2);
7543
7544 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7545 &tonemap->curves[2].tonemap_points[0][0],
7546 tonemap->tonemap_points_cnt * 2);
7547 }
7548
7549 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7550 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7551 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7552 CC_GAIN_MAX);
7553 }
7554
7555 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7556 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7557 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7558 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7559 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7560 }
7561
7562 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7563 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7564 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7565 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7566 toneCurve->tonemap_points_cnt,
7567 CAM_MAX_TONEMAP_CURVE_SIZE);
7568 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7569 }
7570 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7571 (float*)toneCurve->curve.tonemap_points,
7572 toneCurve->tonemap_points_cnt * 2);
7573 }
7574
7575 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7576 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7577 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7578 predColorCorrectionGains->gains, 4);
7579 }
7580
7581 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7582 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7583 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7584 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7585 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7586 }
7587
7588 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7589 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7590 }
7591
7592 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7593 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7594 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7595 }
7596
7597 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7598 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7599 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7600 }
7601
7602 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7603 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7604 *effectMode);
7605 if (NAME_NOT_FOUND != val) {
7606 uint8_t fwk_effectMode = (uint8_t)val;
7607 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7608 }
7609 }
7610
7611 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7612 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7613 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7614 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7615 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7616 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7617 }
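        // The framework's test pattern data is ordered [R, G_even, G_odd, B]; r and b map
        // directly, while the two green samples are ordered below according to the
        // sensor's Bayer (CFA) layout.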
7618 int32_t fwk_testPatternData[4];
7619 fwk_testPatternData[0] = testPatternData->r;
7620 fwk_testPatternData[3] = testPatternData->b;
7621 switch (gCamCapability[mCameraId]->color_arrangement) {
7622 case CAM_FILTER_ARRANGEMENT_RGGB:
7623 case CAM_FILTER_ARRANGEMENT_GRBG:
7624 fwk_testPatternData[1] = testPatternData->gr;
7625 fwk_testPatternData[2] = testPatternData->gb;
7626 break;
7627 case CAM_FILTER_ARRANGEMENT_GBRG:
7628 case CAM_FILTER_ARRANGEMENT_BGGR:
7629 fwk_testPatternData[2] = testPatternData->gr;
7630 fwk_testPatternData[1] = testPatternData->gb;
7631 break;
7632 default:
7633 LOGE("color arrangement %d is not supported",
7634 gCamCapability[mCameraId]->color_arrangement);
7635 break;
7636 }
7637 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7638 }
7639
7640 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7641 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7642 }
7643
7644 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7645 String8 str((const char *)gps_methods);
7646 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7647 }
7648
7649 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7650 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7651 }
7652
7653 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7654 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7655 }
7656
7657 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7658 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7659 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7660 }
7661
7662 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7663 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7664 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7665 }
7666
7667 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7668 int32_t fwk_thumb_size[2];
7669 fwk_thumb_size[0] = thumb_size->width;
7670 fwk_thumb_size[1] = thumb_size->height;
7671 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7672 }
7673
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007674 // Skip reprocess metadata if there is no input stream.
7675 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7676 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7677 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7678 privateData,
7679 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7680 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007681 }
7682
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007683 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007684 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007685 meteringMode, 1);
7686 }
7687
Thierry Strudel54dc9782017-02-15 12:12:10 -08007688 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7689 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7690 LOGD("hdr_scene_data: %d %f\n",
7691 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7692 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7693 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7694 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7695 &isHdr, 1);
7696 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7697 &isHdrConfidence, 1);
7698 }
7699
7700
7701
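    // The tuning blob below is packed as six uint32_t header fields (data version plus the
    // sensor, VFE, CPP, CAC and mod3 payload sizes) followed by the sensor, VFE, CPP and
    // CAC payloads, each clamped to its TUNING_*_DATA_MAX limit.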
Thierry Strudel3d639192016-09-09 11:52:26 -07007702 if (metadata->is_tuning_params_valid) {
7703 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7704 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7705 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7706
7707
7708 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7709 sizeof(uint32_t));
7710 data += sizeof(uint32_t);
7711
7712 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7713 sizeof(uint32_t));
7714 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7715 data += sizeof(uint32_t);
7716
7717 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7718 sizeof(uint32_t));
7719 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7720 data += sizeof(uint32_t);
7721
7722 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7723 sizeof(uint32_t));
7724 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7725 data += sizeof(uint32_t);
7726
7727 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7728 sizeof(uint32_t));
7729 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7730 data += sizeof(uint32_t);
7731
7732 metadata->tuning_params.tuning_mod3_data_size = 0;
7733 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7734 sizeof(uint32_t));
7735 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7736 data += sizeof(uint32_t);
7737
7738 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7739 TUNING_SENSOR_DATA_MAX);
7740 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7741 count);
7742 data += count;
7743
7744 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7745 TUNING_VFE_DATA_MAX);
7746 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7747 count);
7748 data += count;
7749
7750 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7751 TUNING_CPP_DATA_MAX);
7752 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7753 count);
7754 data += count;
7755
7756 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7757 TUNING_CAC_DATA_MAX);
7758 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7759 count);
7760 data += count;
7761
7762 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7763 (int32_t *)(void *)tuning_meta_data_blob,
7764 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7765 }
7766
7767 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7768 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7769 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7770 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7771 NEUTRAL_COL_POINTS);
7772 }
7773
7774 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7775 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7776 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7777 }
7778
7779 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7780 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7782        // Adjust the AE region from the sensor output coordinate system to the
7783        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007783 cam_rect_t hAeRect = hAeRegions->rect;
7784 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7785 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007786
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007787 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007788 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7789 REGIONS_TUPLE_COUNT);
7790 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7791 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007792 hAeRect.left, hAeRect.top, hAeRect.width,
7793 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007794 }
7795
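    // Report AF state only if it was not already delivered as an early partial result;
    // prefer the focus state cached with the pending request when it is valid.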
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007796 if (!pendingRequest.focusStateSent) {
7797 if (pendingRequest.focusStateValid) {
7798 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7799 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007800 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007801 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7802 uint8_t fwk_afState = (uint8_t) *afState;
7803 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7804 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7805 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007806 }
7807 }
7808
Thierry Strudel3d639192016-09-09 11:52:26 -07007809 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7810 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7811 }
7812
7813 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7814 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7815 }
7816
7817 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7818 uint8_t fwk_lensState = *lensState;
7819 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7820 }
7821
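    // The framework only defines a generic AUTO antibanding mode, so fold the HAL's
    // 50Hz/60Hz auto variants into AUTO before the lookup.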
Thierry Strudel3d639192016-09-09 11:52:26 -07007822 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007823 uint32_t ab_mode = *hal_ab_mode;
7824 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7825 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7826 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7827 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007828 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007829 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007830 if (NAME_NOT_FOUND != val) {
7831 uint8_t fwk_ab_mode = (uint8_t)val;
7832 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7833 }
7834 }
7835
7836 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7837 int val = lookupFwkName(SCENE_MODES_MAP,
7838 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7839 if (NAME_NOT_FOUND != val) {
7840 uint8_t fwkBestshotMode = (uint8_t)val;
7841 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7842 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7843 } else {
7844 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7845 }
7846 }
7847
7848 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7849 uint8_t fwk_mode = (uint8_t) *mode;
7850 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7851 }
7852
7853    /* Constant metadata values to be updated */
7854 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7855 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7856
7857 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7858 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7859
7860 int32_t hotPixelMap[2];
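    // No hot pixel map samples are reported, so publish an empty (zero-count) entry,
    // presumably to keep the tag present in the result.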
7861 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7862
7863 // CDS
7864 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7865 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7866 }
7867
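    // The video HDR, IR and TNR blocks below track on/off transitions in
    // mCurrFeatureState so that feature toggles can be logged for profiling.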
Thierry Strudel04e026f2016-10-10 11:27:36 -07007868 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7869 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007870 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007871 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7872 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7873 } else {
7874 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7875 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007876
7877 if(fwk_hdr != curr_hdr_state) {
7878 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7879 if(fwk_hdr)
7880 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7881 else
7882 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7883 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007884 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7885 }
7886
Thierry Strudel54dc9782017-02-15 12:12:10 -08007887 //binning correction
7888 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7889 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7890 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7891 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7892 }
7893
Thierry Strudel04e026f2016-10-10 11:27:36 -07007894 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007895 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007896 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7897 int8_t is_ir_on = 0;
7898
7899        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7900 if(is_ir_on != curr_ir_state) {
7901 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7902 if(is_ir_on)
7903 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7904 else
7905 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7906 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007907 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007908 }
7909
Thierry Strudel269c81a2016-10-12 12:13:59 -07007910 // AEC SPEED
7911 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7912 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7913 }
7914
7915 // AWB SPEED
7916 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7917 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7918 }
7919
Thierry Strudel3d639192016-09-09 11:52:26 -07007920 // TNR
7921 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7922 uint8_t tnr_enable = tnr->denoise_enable;
7923 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007924 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7925 int8_t is_tnr_on = 0;
7926
7927        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7928 if(is_tnr_on != curr_tnr_state) {
7929 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7930 if(is_tnr_on)
7931 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7932 else
7933 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7934 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007935
7936 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7937 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7938 }
7939
7940 // Reprocess crop data
7941 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7942 uint8_t cnt = crop_data->num_of_streams;
7943 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7944 // mm-qcamera-daemon only posts crop_data for streams
7945 // not linked to pproc. So no valid crop metadata is not
7946 // necessarily an error case.
7947 LOGD("No valid crop metadata entries");
7948 } else {
7949 uint32_t reproc_stream_id;
7950 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
7951 LOGD("No reprocessible stream found, ignore crop data");
7952 } else {
7953 int rc = NO_ERROR;
7954 Vector<int32_t> roi_map;
7955 int32_t *crop = new int32_t[cnt*4];
7956 if (NULL == crop) {
7957 rc = NO_MEMORY;
7958 }
7959 if (NO_ERROR == rc) {
7960 int32_t streams_found = 0;
7961 for (size_t i = 0; i < cnt; i++) {
7962 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
7963 if (pprocDone) {
7964 // HAL already does internal reprocessing,
7965 // either via reprocessing before JPEG encoding,
7966 // or offline postprocessing for pproc bypass case.
7967 crop[0] = 0;
7968 crop[1] = 0;
7969 crop[2] = mInputStreamInfo.dim.width;
7970 crop[3] = mInputStreamInfo.dim.height;
7971 } else {
7972 crop[0] = crop_data->crop_info[i].crop.left;
7973 crop[1] = crop_data->crop_info[i].crop.top;
7974 crop[2] = crop_data->crop_info[i].crop.width;
7975 crop[3] = crop_data->crop_info[i].crop.height;
7976 }
7977 roi_map.add(crop_data->crop_info[i].roi_map.left);
7978 roi_map.add(crop_data->crop_info[i].roi_map.top);
7979 roi_map.add(crop_data->crop_info[i].roi_map.width);
7980 roi_map.add(crop_data->crop_info[i].roi_map.height);
7981 streams_found++;
7982 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
7983 crop[0], crop[1], crop[2], crop[3]);
7984 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
7985 crop_data->crop_info[i].roi_map.left,
7986 crop_data->crop_info[i].roi_map.top,
7987 crop_data->crop_info[i].roi_map.width,
7988 crop_data->crop_info[i].roi_map.height);
7989 break;
7990
7991 }
7992 }
7993 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
7994 &streams_found, 1);
7995 camMetadata.update(QCAMERA3_CROP_REPROCESS,
7996 crop, (size_t)(streams_found * 4));
7997 if (roi_map.array()) {
7998 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
7999 roi_map.array(), roi_map.size());
8000 }
8001 }
8002 if (crop) {
8003 delete [] crop;
8004 }
8005 }
8006 }
8007 }
8008
8009 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8010        // Regardless of whether CAC is supported, CTS expects the CAC result to be
8011        // non-NULL, so hardcode the CAC result to OFF mode.
8012 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8013 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8014 } else {
8015 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8016 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8017 *cacMode);
8018 if (NAME_NOT_FOUND != val) {
8019 uint8_t resultCacMode = (uint8_t)val;
8020                // Check whether the CAC result from the callback matches the framework-set
8021                // CAC mode; if not, report the CAC mode that came in the corresponding request.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008022 if (pendingRequest.fwkCacMode != resultCacMode) {
8023 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008024 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008025 //Check if CAC is disabled by property
8026 if (m_cacModeDisabled) {
8027 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8028 }
8029
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008030 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008031 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8032 } else {
8033 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8034 }
8035 }
8036 }
8037
8038 // Post blob of cam_cds_data through vendor tag.
8039 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8040 uint8_t cnt = cdsInfo->num_of_streams;
8041 cam_cds_data_t cdsDataOverride;
8042 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8043 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8044 cdsDataOverride.num_of_streams = 1;
8045 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8046 uint32_t reproc_stream_id;
8047 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8048 LOGD("No reprocessible stream found, ignore cds data");
8049 } else {
8050 for (size_t i = 0; i < cnt; i++) {
8051 if (cdsInfo->cds_info[i].stream_id ==
8052 reproc_stream_id) {
8053 cdsDataOverride.cds_info[0].cds_enable =
8054 cdsInfo->cds_info[i].cds_enable;
8055 break;
8056 }
8057 }
8058 }
8059 } else {
8060 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8061 }
8062 camMetadata.update(QCAMERA3_CDS_INFO,
8063 (uint8_t *)&cdsDataOverride,
8064 sizeof(cam_cds_data_t));
8065 }
8066
8067 // Ldaf calibration data
8068 if (!mLdafCalibExist) {
8069 IF_META_AVAILABLE(uint32_t, ldafCalib,
8070 CAM_INTF_META_LDAF_EXIF, metadata) {
8071 mLdafCalibExist = true;
8072 mLdafCalib[0] = ldafCalib[0];
8073 mLdafCalib[1] = ldafCalib[1];
8074 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8075 ldafCalib[0], ldafCalib[1]);
8076 }
8077 }
8078
Thierry Strudel54dc9782017-02-15 12:12:10 -08008079 // EXIF debug data through vendor tag
8080 /*
8081 * Mobicat Mask can assume 3 values:
8082 * 1 refers to Mobicat data,
8083 * 2 refers to Stats Debug and Exif Debug Data
8084 * 3 refers to Mobicat and Stats Debug Data
8085 * We want to make sure that we are sending Exif debug data
8086 * only when Mobicat Mask is 2.
8087 */
8088 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8089 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8090 (uint8_t *)(void *)mExifParams.debug_params,
8091 sizeof(mm_jpeg_debug_exif_params_t));
8092 }
8093
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008094 // Reprocess and DDM debug data through vendor tag
8095 cam_reprocess_info_t repro_info;
8096 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008097 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8098 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008099 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008100 }
8101 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8102 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008103 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008104 }
8105 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8106 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008107 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008108 }
8109 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8110 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008111 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008112 }
8113 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8114 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008115 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008116 }
8117 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008118 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008119 }
8120 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8121 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008122 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008123 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008124 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8125 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8126 }
8127 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8128 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8129 }
8130 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8131 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008132
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008133 // INSTANT AEC MODE
8134 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8135 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8136 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8137 }
8138
Shuzhen Wange763e802016-03-31 10:24:29 -07008139 // AF scene change
8140 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8141 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8142 }
8143
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008144 // Enable ZSL
8145 if (enableZsl != nullptr) {
8146 uint8_t value = *enableZsl ?
8147 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8148 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8149 }
8150
Xu Han821ea9c2017-05-23 09:00:40 -07008151 // OIS Data
8152 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8153 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8154 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8155 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8156 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8157 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8158 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8159 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8160 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8161 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8162 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008163 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8164 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8165 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8166 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008167 }
8168
Thierry Strudel3d639192016-09-09 11:52:26 -07008169 resultMetadata = camMetadata.release();
8170 return resultMetadata;
8171}
8172
8173/*===========================================================================
8174 * FUNCTION : saveExifParams
8175 *
8176 * DESCRIPTION: save EXIF debug parameters from the metadata callback
8177 *
8178 * PARAMETERS :
8179 * @metadata : metadata information from callback
8180 *
8181 * RETURN : none
8182 *
8183 *==========================================================================*/
8184void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8185{
8186 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8187 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8188 if (mExifParams.debug_params) {
8189 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8190 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8191 }
8192 }
8193 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8194 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8195 if (mExifParams.debug_params) {
8196 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8197 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8198 }
8199 }
8200 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8201 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8202 if (mExifParams.debug_params) {
8203 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8204 mExifParams.debug_params->af_debug_params_valid = TRUE;
8205 }
8206 }
8207 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8208 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8209 if (mExifParams.debug_params) {
8210 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8211 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8212 }
8213 }
8214 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8215 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8216 if (mExifParams.debug_params) {
8217 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8218 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8219 }
8220 }
8221 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8222 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8223 if (mExifParams.debug_params) {
8224 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8225 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8226 }
8227 }
8228 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8229 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8230 if (mExifParams.debug_params) {
8231 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8232 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8233 }
8234 }
8235 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8236 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8237 if (mExifParams.debug_params) {
8238 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8239 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8240 }
8241 }
8242}
8243
8244/*===========================================================================
8245 * FUNCTION : get3AExifParams
8246 *
8247 * DESCRIPTION: return the cached 3A EXIF parameters
8248 *
8249 * PARAMETERS : none
8250 *
8251 *
8252 * RETURN : mm_jpeg_exif_params_t
8253 *
8254 *==========================================================================*/
8255mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8256{
8257 return mExifParams;
8258}
8259
8260/*===========================================================================
8261 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8262 *
8263 * DESCRIPTION: translate urgent (partial) metadata into framework result metadata
8264 *
8265 * PARAMETERS :
8266 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008267 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8268 * urgent metadata in a batch. Always true for
8269 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008270 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008271 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8272 * i.e. even though it doesn't map to a valid partial
8273 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008274 * RETURN : camera_metadata_t*
8275 * metadata in a format specified by fwk
8276 *==========================================================================*/
8277camera_metadata_t*
8278QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008279 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008280 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008281{
8282 CameraMetadata camMetadata;
8283 camera_metadata_t *resultMetadata;
8284
Shuzhen Wang485e2442017-08-02 12:21:08 -07008285 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008286 /* In batch mode, use empty metadata if this is not the last in batch
8287 */
8288 resultMetadata = allocate_camera_metadata(0, 0);
8289 return resultMetadata;
8290 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008291
8292 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8293 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8294 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8295 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8296 }
8297
8298 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8299 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8300 &aecTrigger->trigger, 1);
8301 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8302 &aecTrigger->trigger_id, 1);
8303 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8304 aecTrigger->trigger);
8305 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8306 aecTrigger->trigger_id);
8307 }
8308
8309 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8310 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8311 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8312 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8313 }
8314
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008315 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8316 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8317 if (NAME_NOT_FOUND != val) {
8318 uint8_t fwkAfMode = (uint8_t)val;
8319 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8320 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8321 } else {
8322 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8323 val);
8324 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008325 }
8326
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008327 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8328 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8329 af_trigger->trigger);
8330 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8331 af_trigger->trigger_id);
8332
8333 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8334 mAfTrigger = *af_trigger;
8335 uint32_t fwk_AfState = (uint32_t) *afState;
8336
8337 // If this is the result for a new trigger, check if there is new early
8338 // af state. If there is, use the last af state for all results
8339 // preceding current partial frame number.
8340 for (auto & pendingRequest : mPendingRequestsList) {
8341 if (pendingRequest.frame_number < frame_number) {
8342 pendingRequest.focusStateValid = true;
8343 pendingRequest.focusState = fwk_AfState;
8344 } else if (pendingRequest.frame_number == frame_number) {
8345 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8346 // Check if early AF state for trigger exists. If yes, send AF state as
8347 // partial result for better latency.
8348 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8349 pendingRequest.focusStateSent = true;
8350 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8351 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8352 frame_number, fwkEarlyAfState);
8353 }
8354 }
8355 }
8356 }
8357 }
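        // Always report the most recently seen AF trigger (cached in mAfTrigger) so the
        // result carries a trigger even when this partial metadata does not include one.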
8358 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8359 &mAfTrigger.trigger, 1);
8360 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8361
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008362 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8363 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008364 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008365 int32_t afRegions[REGIONS_TUPLE_COUNT];
8366        // Adjust the AF region from the sensor output coordinate system to the
8367        // active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008368 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8369 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008370
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008371 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008372 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8373 REGIONS_TUPLE_COUNT);
8374 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8375 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008376 hAfRect.left, hAfRect.top, hAfRect.width,
8377 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008378 }
8379
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008380 // AF region confidence
8381 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8382 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8383 }
8384
Thierry Strudel3d639192016-09-09 11:52:26 -07008385 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8386 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8387 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8388 if (NAME_NOT_FOUND != val) {
8389 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8390 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8391 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8392 } else {
8393 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8394 }
8395 }
8396
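    // Derive ANDROID_CONTROL_AE_MODE from the HAL's AE mode, flash mode and
    // red-eye reduction settings gathered below.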
8397 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8398 uint32_t aeMode = CAM_AE_MODE_MAX;
8399 int32_t flashMode = CAM_FLASH_MODE_MAX;
8400 int32_t redeye = -1;
8401 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8402 aeMode = *pAeMode;
8403 }
8404 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8405 flashMode = *pFlashMode;
8406 }
8407 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8408 redeye = *pRedeye;
8409 }
8410
8411 if (1 == redeye) {
8412 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8413 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8414 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8415 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8416 flashMode);
8417 if (NAME_NOT_FOUND != val) {
8418 fwk_aeMode = (uint8_t)val;
8419 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8420 } else {
8421 LOGE("Unsupported flash mode %d", flashMode);
8422 }
8423 } else if (aeMode == CAM_AE_MODE_ON) {
8424 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8425 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8426 } else if (aeMode == CAM_AE_MODE_OFF) {
8427 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8428 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008429 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8430 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8431 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008432 } else {
8433 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8434 "flashMode:%d, aeMode:%u!!!",
8435 redeye, flashMode, aeMode);
8436 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008437 if (mInstantAEC) {
8438        // Increment the frame index count until a bound is reached for instant AEC.
8439 mInstantAecFrameIdxCount++;
8440 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8441 CAM_INTF_META_AEC_INFO, metadata) {
8442 LOGH("ae_params->settled = %d",ae_params->settled);
8443            // If AEC has settled, or the number of frames has reached the bound,
8444            // reset instant AEC.
8445 if (ae_params->settled ||
8446 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8447 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8448 mInstantAEC = false;
8449 mResetInstantAEC = true;
8450 mInstantAecFrameIdxCount = 0;
8451 }
8452 }
8453 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008454 resultMetadata = camMetadata.release();
8455 return resultMetadata;
8456}
8457
8458/*===========================================================================
8459 * FUNCTION : dumpMetadataToFile
8460 *
8461 * DESCRIPTION: Dumps tuning metadata to file system
8462 *
8463 * PARAMETERS :
8464 * @meta : tuning metadata
8465 * @dumpFrameCount : current dump frame count
8466 * @enabled : Enable mask
8467 *
8468 *==========================================================================*/
8469void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8470 uint32_t &dumpFrameCount,
8471 bool enabled,
8472 const char *type,
8473 uint32_t frameNumber)
8474{
8475 //Some sanity checks
8476 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8477 LOGE("Tuning sensor data size bigger than expected %d: %d",
8478 meta.tuning_sensor_data_size,
8479 TUNING_SENSOR_DATA_MAX);
8480 return;
8481 }
8482
8483 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8484 LOGE("Tuning VFE data size bigger than expected %d: %d",
8485 meta.tuning_vfe_data_size,
8486 TUNING_VFE_DATA_MAX);
8487 return;
8488 }
8489
8490 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8491 LOGE("Tuning CPP data size bigger than expected %d: %d",
8492 meta.tuning_cpp_data_size,
8493 TUNING_CPP_DATA_MAX);
8494 return;
8495 }
8496
8497 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8498 LOGE("Tuning CAC data size bigger than expected %d: %d",
8499 meta.tuning_cac_data_size,
8500 TUNING_CAC_DATA_MAX);
8501 return;
8502 }
8503 //
8504
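    // The dump file name has the form
    // QCAMERA_DUMP_FRM_LOCATION<YYYYmmddHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin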
8505 if(enabled){
8506 char timeBuf[FILENAME_MAX];
8507 char buf[FILENAME_MAX];
8508 memset(buf, 0, sizeof(buf));
8509 memset(timeBuf, 0, sizeof(timeBuf));
8510 time_t current_time;
8511 struct tm * timeinfo;
8512 time (&current_time);
8513 timeinfo = localtime (&current_time);
8514 if (timeinfo != NULL) {
8515 strftime (timeBuf, sizeof(timeBuf),
8516 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8517 }
8518 String8 filePath(timeBuf);
8519 snprintf(buf,
8520 sizeof(buf),
8521 "%dm_%s_%d.bin",
8522 dumpFrameCount,
8523 type,
8524 frameNumber);
8525 filePath.append(buf);
8526 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8527 if (file_fd >= 0) {
8528 ssize_t written_len = 0;
8529 meta.tuning_data_version = TUNING_DATA_VERSION;
8530 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8531 written_len += write(file_fd, data, sizeof(uint32_t));
8532 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8533 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8534 written_len += write(file_fd, data, sizeof(uint32_t));
8535 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8536 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8537 written_len += write(file_fd, data, sizeof(uint32_t));
8538 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8539 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8540 written_len += write(file_fd, data, sizeof(uint32_t));
8541 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8542 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8543 written_len += write(file_fd, data, sizeof(uint32_t));
8544 meta.tuning_mod3_data_size = 0;
8545 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8546 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8547 written_len += write(file_fd, data, sizeof(uint32_t));
8548 size_t total_size = meta.tuning_sensor_data_size;
8549 data = (void *)((uint8_t *)&meta.data);
8550 written_len += write(file_fd, data, total_size);
8551 total_size = meta.tuning_vfe_data_size;
8552 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8553 written_len += write(file_fd, data, total_size);
8554 total_size = meta.tuning_cpp_data_size;
8555 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8556 written_len += write(file_fd, data, total_size);
8557 total_size = meta.tuning_cac_data_size;
8558 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8559 written_len += write(file_fd, data, total_size);
8560 close(file_fd);
8561 }else {
8562 LOGE("fail to open file for metadata dumping");
8563 }
8564 }
8565}
8566
8567/*===========================================================================
8568 * FUNCTION : cleanAndSortStreamInfo
8569 *
8570 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8571 * and sort them such that raw stream is at the end of the list
8572 * This is a workaround for camera daemon constraint.
8573 *
8574 * PARAMETERS : None
8575 *
8576 *==========================================================================*/
8577void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8578{
8579 List<stream_info_t *> newStreamInfo;
8580
8581 /*clean up invalid streams*/
8582 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8583 it != mStreamInfo.end();) {
8584 if(((*it)->status) == INVALID){
8585 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8586 delete channel;
8587 free(*it);
8588 it = mStreamInfo.erase(it);
8589 } else {
8590 it++;
8591 }
8592 }
8593
8594 // Move preview/video/callback/snapshot streams into newList
8595 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8596 it != mStreamInfo.end();) {
8597 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8598 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8599 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8600 newStreamInfo.push_back(*it);
8601 it = mStreamInfo.erase(it);
8602 } else
8603 it++;
8604 }
8605 // Move raw streams into newList
8606 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8607 it != mStreamInfo.end();) {
8608 newStreamInfo.push_back(*it);
8609 it = mStreamInfo.erase(it);
8610 }
8611
8612 mStreamInfo = newStreamInfo;
8613}
8614
8615/*===========================================================================
8616 * FUNCTION : extractJpegMetadata
8617 *
8618 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8619 *              JPEG metadata is cached in HAL, and returned as part of the capture
8620 *              result when metadata is returned from the camera daemon.
8621 *
8622 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8623 * @request: capture request
8624 *
8625 *==========================================================================*/
8626void QCamera3HardwareInterface::extractJpegMetadata(
8627 CameraMetadata& jpegMetadata,
8628 const camera3_capture_request_t *request)
8629{
8630 CameraMetadata frame_settings;
8631 frame_settings = request->settings;
8632
8633 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8634 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8635 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8636 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8637
8638 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8639 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8640 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8641 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8642
8643 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8644 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8645 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8646 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8647
8648 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8649 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8650 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8651 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8652
8653 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8654 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8655 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8656 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8657
8658 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8659 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8660 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8661 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8662
8663 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8664 int32_t thumbnail_size[2];
8665 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8666 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8667 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8668 int32_t orientation =
8669 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008670 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008671 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8672 int32_t temp;
8673 temp = thumbnail_size[0];
8674 thumbnail_size[0] = thumbnail_size[1];
8675 thumbnail_size[1] = temp;
8676 }
8677 }
8678 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8679 thumbnail_size,
8680 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8681 }
8682
8683}
8684
8685/*===========================================================================
8686 * FUNCTION : convertToRegions
8687 *
8688 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8689 *
8690 * PARAMETERS :
8691 * @rect : cam_rect_t struct to convert
8692 * @region : int32_t destination array
8693 * @weight : if we are converting from cam_area_t, weight is valid
8694 * else weight = -1
8695 *
8696 *==========================================================================*/
8697void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8698 int32_t *region, int weight)
8699{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008700 region[FACE_LEFT] = rect.left;
8701 region[FACE_TOP] = rect.top;
8702 region[FACE_RIGHT] = rect.left + rect.width;
8703 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008704 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008705 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008706 }
8707}
8708
8709/*===========================================================================
8710 * FUNCTION : convertFromRegions
8711 *
8712 * DESCRIPTION: helper method to convert a framework region entry into cam_area_t
8713 *
8714 * PARAMETERS :
8715 *   @roi            : cam_area_t struct to be filled
8716 *   @frame_settings : frame settings containing the region entry
8717 *   @tag            : metadata tag of the region to convert, laid out as
8718 *                     [x_min, y_min, x_max, y_max, weight]
8719 *
8720 *==========================================================================*/
8721void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008722 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008723{
Thierry Strudel3d639192016-09-09 11:52:26 -07008724 int32_t x_min = frame_settings.find(tag).data.i32[0];
8725 int32_t y_min = frame_settings.find(tag).data.i32[1];
8726 int32_t x_max = frame_settings.find(tag).data.i32[2];
8727 int32_t y_max = frame_settings.find(tag).data.i32[3];
8728 roi.weight = frame_settings.find(tag).data.i32[4];
8729 roi.rect.left = x_min;
8730 roi.rect.top = y_min;
8731 roi.rect.width = x_max - x_min;
8732 roi.rect.height = y_max - y_min;
8733}
8734
8735/*===========================================================================
8736 * FUNCTION : resetIfNeededROI
8737 *
8738 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8739 * crop region
8740 *
8741 * PARAMETERS :
8742 * @roi : cam_area_t struct to resize
8743 * @scalerCropRegion : cam_crop_region_t region to compare against
8744 *
8745 *
8746 *==========================================================================*/
8747bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8748 const cam_crop_region_t* scalerCropRegion)
8749{
8750 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8751 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8752 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8753 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8754
8755    /* According to the spec, weight = 0 indicates that the roi should be disabled.
8756     * Without this check, the validation below (whether the roi lies inside the
8757     * scaler crop region) would fail, the roi would not be reset, and the
8758     * algorithm would continue to use a stale roi window.
8759     */
8760 if (roi->weight == 0) {
8761 return true;
8762 }
8763
8764 if ((roi_x_max < scalerCropRegion->left) ||
8765        // right edge of roi window is left of scaler crop's left edge
8766        (roi_y_max < scalerCropRegion->top) ||
8767        // bottom edge of roi window is above scaler crop's top edge
8768        (roi->rect.left > crop_x_max) ||
8769        // left edge of roi window is beyond (to the right of) scaler crop's right edge
8770        (roi->rect.top > crop_y_max)){
8771        // top edge of roi window is above scaler crop's top edge
8772 return false;
8773 }
8774 if (roi->rect.left < scalerCropRegion->left) {
8775 roi->rect.left = scalerCropRegion->left;
8776 }
8777 if (roi->rect.top < scalerCropRegion->top) {
8778 roi->rect.top = scalerCropRegion->top;
8779 }
8780 if (roi_x_max > crop_x_max) {
8781 roi_x_max = crop_x_max;
8782 }
8783 if (roi_y_max > crop_y_max) {
8784 roi_y_max = crop_y_max;
8785 }
8786 roi->rect.width = roi_x_max - roi->rect.left;
8787 roi->rect.height = roi_y_max - roi->rect.top;
8788 return true;
8789}
8790
8791/*===========================================================================
8792 * FUNCTION : convertLandmarks
8793 *
8794 * DESCRIPTION: helper method to extract the landmarks from face detection info
8795 *
8796 * PARAMETERS :
8797 * @landmark_data : input landmark data to be converted
8798 * @landmarks : int32_t destination array
8799 *
8800 *
8801 *==========================================================================*/
8802void QCamera3HardwareInterface::convertLandmarks(
8803 cam_face_landmarks_info_t landmark_data,
8804 int32_t *landmarks)
8805{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008806 if (landmark_data.is_left_eye_valid) {
8807 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8808 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8809 } else {
8810 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8811 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8812 }
8813
8814 if (landmark_data.is_right_eye_valid) {
8815 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8816 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8817 } else {
8818 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8819 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8820 }
8821
8822 if (landmark_data.is_mouth_valid) {
8823 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8824 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8825 } else {
8826 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8827 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8828 }
8829}
8830
8831/*===========================================================================
8832 * FUNCTION : setInvalidLandmarks
8833 *
8834 * DESCRIPTION: helper method to set invalid landmarks
8835 *
8836 * PARAMETERS :
8837 * @landmarks : int32_t destination array
8838 *
8839 *
8840 *==========================================================================*/
8841void QCamera3HardwareInterface::setInvalidLandmarks(
8842 int32_t *landmarks)
8843{
8844 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8845 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8846 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8847 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8848 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8849 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008850}
8851
8852#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008853
8854/*===========================================================================
8855 * FUNCTION : getCapabilities
8856 *
8857 * DESCRIPTION: query camera capability from back-end
8858 *
8859 * PARAMETERS :
8860 * @ops : mm-interface ops structure
8861 * @cam_handle : camera handle for which we need capability
8862 *
8863 * RETURN : ptr type of capability structure
8864 * capability for success
8865 * NULL for failure
8866 *==========================================================================*/
8867cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8868 uint32_t cam_handle)
8869{
8870 int rc = NO_ERROR;
8871 QCamera3HeapMemory *capabilityHeap = NULL;
8872 cam_capability_t *cap_ptr = NULL;
8873
8874 if (ops == NULL) {
8875 LOGE("Invalid arguments");
8876 return NULL;
8877 }
8878
8879 capabilityHeap = new QCamera3HeapMemory(1);
8880 if (capabilityHeap == NULL) {
8881 LOGE("creation of capabilityHeap failed");
8882 return NULL;
8883 }
8884
8885 /* Allocate memory for capability buffer */
8886 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8887 if(rc != OK) {
8888 LOGE("No memory for cappability");
8889 goto allocate_failed;
8890 }
8891
8892 /* Map memory for capability buffer */
8893 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8894
8895 rc = ops->map_buf(cam_handle,
8896 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8897 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8898 if(rc < 0) {
8899 LOGE("failed to map capability buffer");
8900 rc = FAILED_TRANSACTION;
8901 goto map_failed;
8902 }
8903
8904 /* Query Capability */
8905 rc = ops->query_capability(cam_handle);
8906 if(rc < 0) {
8907 LOGE("failed to query capability");
8908 rc = FAILED_TRANSACTION;
8909 goto query_failed;
8910 }
8911
8912 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8913 if (cap_ptr == NULL) {
8914 LOGE("out of memory");
8915 rc = NO_MEMORY;
8916 goto query_failed;
8917 }
8918
8919 memset(cap_ptr, 0, sizeof(cam_capability_t));
8920 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8921
8922 int index;
8923 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8924 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8925 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8926 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8927 }
8928
8929query_failed:
8930 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
8931map_failed:
8932 capabilityHeap->deallocate();
8933allocate_failed:
8934 delete capabilityHeap;
8935
8936 if (rc != NO_ERROR) {
8937 return NULL;
8938 } else {
8939 return cap_ptr;
8940 }
8941}
8942
Thierry Strudel3d639192016-09-09 11:52:26 -07008943/*===========================================================================
8944 * FUNCTION : initCapabilities
8945 *
8946 * DESCRIPTION: initialize camera capabilities in static data struct
8947 *
8948 * PARAMETERS :
8949 * @cameraId : camera Id
8950 *
8951 * RETURN : int32_t type of status
8952 * NO_ERROR -- success
8953 * non-zero failure code
8954 *==========================================================================*/
8955int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
8956{
8957 int rc = 0;
8958 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008959 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07008960
8961 rc = camera_open((uint8_t)cameraId, &cameraHandle);
8962 if (rc) {
8963 LOGE("camera_open failed. rc = %d", rc);
8964 goto open_failed;
8965 }
8966 if (!cameraHandle) {
8967 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
8968 goto open_failed;
8969 }
8970
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008971 handle = get_main_camera_handle(cameraHandle->camera_handle);
8972 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
8973 if (gCamCapability[cameraId] == NULL) {
8974 rc = FAILED_TRANSACTION;
8975 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07008976 }
8977
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008978 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008979 if (is_dual_camera_by_idx(cameraId)) {
8980 handle = get_aux_camera_handle(cameraHandle->camera_handle);
8981 gCamCapability[cameraId]->aux_cam_cap =
8982 getCapabilities(cameraHandle->ops, handle);
8983 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
8984 rc = FAILED_TRANSACTION;
8985 free(gCamCapability[cameraId]);
8986 goto failed_op;
8987 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08008988
8989 // Copy the main camera capability to main_cam_cap struct
8990 gCamCapability[cameraId]->main_cam_cap =
8991 (cam_capability_t *)malloc(sizeof(cam_capability_t));
8992 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
8993 LOGE("out of memory");
8994 rc = NO_MEMORY;
8995 goto failed_op;
8996 }
8997 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
8998 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008999 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009000failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009001 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9002 cameraHandle = NULL;
9003open_failed:
9004 return rc;
9005}
9006
9007/*==========================================================================
9008 * FUNCTION : get3AVersion
9009 *
9010 * DESCRIPTION: get the Q3A S/W version
9011 *
9012 * PARAMETERS :
9013 * @sw_version: Reference of Q3A structure which will hold version info upon
9014 * return
9015 *
9016 * RETURN : None
9017 *
9018 *==========================================================================*/
9019void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9020{
9021 if(gCamCapability[mCameraId])
9022 sw_version = gCamCapability[mCameraId]->q3a_version;
9023 else
9024 LOGE("Capability structure NULL!");
9025}
9026
9027
9028/*===========================================================================
9029 * FUNCTION : initParameters
9030 *
9031 * DESCRIPTION: initialize camera parameters
9032 *
9033 * PARAMETERS :
9034 *
9035 * RETURN : int32_t type of status
9036 * NO_ERROR -- success
9037 * non-zero failure code
9038 *==========================================================================*/
9039int QCamera3HardwareInterface::initParameters()
9040{
9041 int rc = 0;
9042
9043 //Allocate Set Param Buffer
9044 mParamHeap = new QCamera3HeapMemory(1);
9045 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9046 if(rc != OK) {
9047 rc = NO_MEMORY;
9048 LOGE("Failed to allocate SETPARM Heap memory");
9049 delete mParamHeap;
9050 mParamHeap = NULL;
9051 return rc;
9052 }
9053
9054 //Map memory for parameters buffer
9055 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9056 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9057 mParamHeap->getFd(0),
9058 sizeof(metadata_buffer_t),
9059 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9060 if(rc < 0) {
9061 LOGE("failed to map SETPARM buffer");
9062 rc = FAILED_TRANSACTION;
9063 mParamHeap->deallocate();
9064 delete mParamHeap;
9065 mParamHeap = NULL;
9066 return rc;
9067 }
9068
9069 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9070
9071 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9072 return rc;
9073}
9074
9075/*===========================================================================
9076 * FUNCTION : deinitParameters
9077 *
9078 * DESCRIPTION: de-initialize camera parameters
9079 *
9080 * PARAMETERS :
9081 *
9082 * RETURN : NONE
9083 *==========================================================================*/
9084void QCamera3HardwareInterface::deinitParameters()
9085{
9086 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9087 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9088
9089 mParamHeap->deallocate();
9090 delete mParamHeap;
9091 mParamHeap = NULL;
9092
9093 mParameters = NULL;
9094
9095 free(mPrevParameters);
9096 mPrevParameters = NULL;
9097}
9098
9099/*===========================================================================
9100 * FUNCTION : calcMaxJpegSize
9101 *
9102 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9103 *
9104 * PARAMETERS :
9105 * @camera_id : camera Id
9106 * RETURN : max_jpeg_size
9107 *==========================================================================*/
9108size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9109{
9110 size_t max_jpeg_size = 0;
9111 size_t temp_width, temp_height;
9112 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9113 MAX_SIZES_CNT);
9114 for (size_t i = 0; i < count; i++) {
9115 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9116 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9117 if (temp_width * temp_height > max_jpeg_size ) {
9118 max_jpeg_size = temp_width * temp_height;
9119 }
9120 }
9121 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9122 return max_jpeg_size;
9123}
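/* Worked example of the sizing heuristic above (dimensions assumed for
 * illustration, not taken from any particular capability table): a 4608x3456
 * maximum picture size gives 4608 * 3456 * 3 / 2 = 23,887,872 bytes, plus
 * sizeof(camera3_jpeg_blob_t) for the trailing blob header appended at the
 * end of the JPEG buffer. */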
9124
9125/*===========================================================================
9126 * FUNCTION : getMaxRawSize
9127 *
9128 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9129 *
9130 * PARAMETERS :
9131 * @camera_id : camera Id
9132 * RETURN : Largest supported Raw Dimension
9133 *==========================================================================*/
9134cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9135{
9136 int max_width = 0;
9137 cam_dimension_t maxRawSize;
9138
9139 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9140 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9141 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9142 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9143 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9144 }
9145 }
9146 return maxRawSize;
9147}
9148
9149
9150/*===========================================================================
9151 * FUNCTION : calcMaxJpegDim
9152 *
9153 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9154 *
9155 * PARAMETERS :
9156 *
9157 * RETURN : max_jpeg_dim
9158 *==========================================================================*/
9159cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9160{
9161 cam_dimension_t max_jpeg_dim;
9162 cam_dimension_t curr_jpeg_dim;
9163 max_jpeg_dim.width = 0;
9164 max_jpeg_dim.height = 0;
9165 curr_jpeg_dim.width = 0;
9166 curr_jpeg_dim.height = 0;
9167 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9168 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9169 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9170 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9171 max_jpeg_dim.width * max_jpeg_dim.height ) {
9172 max_jpeg_dim.width = curr_jpeg_dim.width;
9173 max_jpeg_dim.height = curr_jpeg_dim.height;
9174 }
9175 }
9176 return max_jpeg_dim;
9177}
9178
9179/*===========================================================================
9180 * FUNCTION : addStreamConfig
9181 *
9182 * DESCRIPTION: adds the stream configuration to the array
9183 *
9184 * PARAMETERS :
9185 * @available_stream_configs : pointer to stream configuration array
9186 * @scalar_format : scalar format
9187 * @dim : configuration dimension
9188 * @config_type : input or output configuration type
9189 *
9190 * RETURN : NONE
9191 *==========================================================================*/
9192void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9193 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9194{
9195 available_stream_configs.add(scalar_format);
9196 available_stream_configs.add(dim.width);
9197 available_stream_configs.add(dim.height);
9198 available_stream_configs.add(config_type);
9199}
9200
9201/*===========================================================================
9202 * FUNCTION : supportBurstCapture
9203 *
9204 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9205 *
9206 * PARAMETERS :
9207 * @cameraId : camera Id
9208 *
9209 * RETURN : true if camera supports BURST_CAPTURE
9210 * false otherwise
9211 *==========================================================================*/
9212bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9213{
9214 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9215 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9216 const int32_t highResWidth = 3264;
9217 const int32_t highResHeight = 2448;
9218
9219 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9220 // Maximum resolution images cannot be captured at >= 10fps
9221 // -> not supporting BURST_CAPTURE
9222 return false;
9223 }
9224
9225 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9226 // Maximum resolution images can be captured at >= 20fps
9227 // --> supporting BURST_CAPTURE
9228 return true;
9229 }
9230
9231 // Find the smallest highRes resolution, or largest resolution if there is none
9232 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9233 MAX_SIZES_CNT);
9234 size_t highRes = 0;
9235 while ((highRes + 1 < totalCnt) &&
9236 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9237 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9238 highResWidth * highResHeight)) {
9239 highRes++;
9240 }
9241 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9242 return true;
9243 } else {
9244 return false;
9245 }
9246}
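/* A quick sketch of the decision above, with assumed durations: a full
 * resolution picture_min_duration of 40 ms (25 fps) returns true at the
 * second check, 120 ms (> 100 ms) returns false at the first, and anything
 * in between falls through to checking the smallest picture size that is
 * still >= 3264x2448 against the 50 ms (20 fps) bound. */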
9247
9248/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009249 * FUNCTION : getPDStatIndex
9250 *
9251 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9252 *
9253 * PARAMETERS :
9254 * @caps : camera capabilities
9255 *
9256 * RETURN : int32_t type
9257 * non-negative - on success
9258 * -1 - on failure
9259 *==========================================================================*/
9260int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9261 if (nullptr == caps) {
9262 return -1;
9263 }
9264
9265 uint32_t metaRawCount = caps->meta_raw_channel_count;
9266 int32_t ret = -1;
9267 for (size_t i = 0; i < metaRawCount; i++) {
9268 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9269 ret = i;
9270 break;
9271 }
9272 }
9273
9274 return ret;
9275}
9276
9277/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009278 * FUNCTION : initStaticMetadata
9279 *
9280 * DESCRIPTION: initialize the static metadata
9281 *
9282 * PARAMETERS :
9283 * @cameraId : camera Id
9284 *
9285 * RETURN : int32_t type of status
9286 * 0 -- success
9287 * non-zero failure code
9288 *==========================================================================*/
9289int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9290{
9291 int rc = 0;
9292 CameraMetadata staticInfo;
9293 size_t count = 0;
9294 bool limitedDevice = false;
9295 char prop[PROPERTY_VALUE_MAX];
9296 bool supportBurst = false;
9297
9298 supportBurst = supportBurstCapture(cameraId);
9299
9300 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9301 * guaranteed or if min fps of max resolution is less than 20 fps, it is
9302 * advertised as a limited device */
9303 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9304 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9305 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9306 !supportBurst;
9307
9308 uint8_t supportedHwLvl = limitedDevice ?
9309 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009310#ifndef USE_HAL_3_3
9311 // LEVEL_3 - This device will support level 3.
9312 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9313#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009314 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009315#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009316
9317 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9318 &supportedHwLvl, 1);
9319
9320 bool facingBack = false;
9321 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9322 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9323 facingBack = true;
9324 }
9325 /*HAL 3 only*/
9326 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9327 &gCamCapability[cameraId]->min_focus_distance, 1);
9328
9329 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9330 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9331
9332 /*should be using focal lengths but sensor doesn't provide that info now*/
9333 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9334 &gCamCapability[cameraId]->focal_length,
9335 1);
9336
9337 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9338 gCamCapability[cameraId]->apertures,
9339 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9340
9341 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9342 gCamCapability[cameraId]->filter_densities,
9343 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9344
9345
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009346 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9347 size_t mode_count =
9348 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9349 for (size_t i = 0; i < mode_count; i++) {
9350 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9351 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009352 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009353 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009354
9355 int32_t lens_shading_map_size[] = {
9356 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9357 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9358 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9359 lens_shading_map_size,
9360 sizeof(lens_shading_map_size)/sizeof(int32_t));
9361
9362 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9363 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9364
9365 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9366 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9367
9368 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9369 &gCamCapability[cameraId]->max_frame_duration, 1);
9370
9371 camera_metadata_rational baseGainFactor = {
9372 gCamCapability[cameraId]->base_gain_factor.numerator,
9373 gCamCapability[cameraId]->base_gain_factor.denominator};
9374 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9375 &baseGainFactor, 1);
9376
9377 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9378 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9379
9380 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9381 gCamCapability[cameraId]->pixel_array_size.height};
9382 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9383 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9384
9385 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9386 gCamCapability[cameraId]->active_array_size.top,
9387 gCamCapability[cameraId]->active_array_size.width,
9388 gCamCapability[cameraId]->active_array_size.height};
9389 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9390 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9391
9392 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9393 &gCamCapability[cameraId]->white_level, 1);
9394
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009395 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9396 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9397 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009398 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009399 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009400
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009401#ifndef USE_HAL_3_3
9402 bool hasBlackRegions = false;
9403 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9404 LOGW("black_region_count: %d is bounded to %d",
9405 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9406 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9407 }
9408 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9409 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9410 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9411 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9412 }
9413 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9414 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9415 hasBlackRegions = true;
9416 }
9417#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009418 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9419 &gCamCapability[cameraId]->flash_charge_duration, 1);
9420
9421 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9422 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9423
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009424 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9425 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9426 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009427 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9428 &timestampSource, 1);
9429
Thierry Strudel54dc9782017-02-15 12:12:10 -08009430 //update histogram vendor data
9431 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009432 &gCamCapability[cameraId]->histogram_size, 1);
9433
Thierry Strudel54dc9782017-02-15 12:12:10 -08009434 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009435 &gCamCapability[cameraId]->max_histogram_count, 1);
9436
Shuzhen Wang14415f52016-11-16 18:26:18 -08009437 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9438 //so that the app can request fewer bins than the maximum supported.
9439 std::vector<int32_t> histBins;
9440 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9441 histBins.push_back(maxHistBins);
9442 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9443 (maxHistBins & 0x1) == 0) {
9444 histBins.push_back(maxHistBins >> 1);
9445 maxHistBins >>= 1;
9446 }
9447 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9448 histBins.data(), histBins.size());
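// Illustrative values (assumed, not read from a specific sensor): with
// max_histogram_count = 256 and MIN_CAM_HISTOGRAM_STATS_SIZE = 32, the loop
// above advertises histBins = {256, 128, 64, 32}; halving stops once the
// next bin count would fall below the minimum or the current count is odd.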
9449
Thierry Strudel3d639192016-09-09 11:52:26 -07009450 int32_t sharpness_map_size[] = {
9451 gCamCapability[cameraId]->sharpness_map_size.width,
9452 gCamCapability[cameraId]->sharpness_map_size.height};
9453
9454 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9455 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9456
9457 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9458 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9459
Emilian Peev0f3c3162017-03-15 12:57:46 +00009460 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9461 if (0 <= indexPD) {
9462 // Advertise PD stats data as part of the Depth capabilities
9463 int32_t depthWidth =
9464 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9465 int32_t depthHeight =
9466 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009467 int32_t depthStride =
9468 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009469 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9470 assert(0 < depthSamplesCount);
9471 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9472 &depthSamplesCount, 1);
9473
9474 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9475 depthHeight,
9476 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9477 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9478 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9479 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9480 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9481
9482 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9483 depthHeight, 33333333,
9484 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9485 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9486 depthMinDuration,
9487 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9488
9489 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9490 depthHeight, 0,
9491 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9492 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9493 depthStallDuration,
9494 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9495
9496 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9497 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009498
9499 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9500 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9501 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009502 }
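/* Sketch of the PD sizing math above with assumed dimensions: a hypothetical
 * 640x480 PDAF stats buffer yields depthStride = 640 * 2 = 1280 bytes and
 * depthSamplesCount = (640 * 480 * 2) / 16 = 38400, which is the sample count
 * advertised for the HAL_PIXEL_FORMAT_BLOB depth configuration. */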
9503
Thierry Strudel3d639192016-09-09 11:52:26 -07009504 int32_t scalar_formats[] = {
9505 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9506 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9507 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9508 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9509 HAL_PIXEL_FORMAT_RAW10,
9510 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009511 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9512 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9513 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009514
9515 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9516 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9517 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9518 count, MAX_SIZES_CNT, available_processed_sizes);
9519 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9520 available_processed_sizes, count * 2);
9521
9522 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9523 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9524 makeTable(gCamCapability[cameraId]->raw_dim,
9525 count, MAX_SIZES_CNT, available_raw_sizes);
9526 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9527 available_raw_sizes, count * 2);
9528
9529 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9530 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9531 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9532 count, MAX_SIZES_CNT, available_fps_ranges);
9533 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9534 available_fps_ranges, count * 2);
9535
9536 camera_metadata_rational exposureCompensationStep = {
9537 gCamCapability[cameraId]->exp_compensation_step.numerator,
9538 gCamCapability[cameraId]->exp_compensation_step.denominator};
9539 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9540 &exposureCompensationStep, 1);
9541
9542 Vector<uint8_t> availableVstabModes;
9543 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9544 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009545 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009546 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009547 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009548 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009549 count = IS_TYPE_MAX;
9550 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9551 for (size_t i = 0; i < count; i++) {
9552 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9553 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9554 eisSupported = true;
9555 break;
9556 }
9557 }
9558 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009559 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9560 }
9561 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9562 availableVstabModes.array(), availableVstabModes.size());
9563
9564 /*HAL 1 and HAL 3 common*/
9565 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9566 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9567 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009568 // Cap the max zoom to the max preferred value
9569 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009570 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9571 &maxZoom, 1);
9572
9573 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9574 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9575
9576 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9577 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9578 max3aRegions[2] = 0; /* AF not supported */
9579 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9580 max3aRegions, 3);
9581
9582 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9583 memset(prop, 0, sizeof(prop));
9584 property_get("persist.camera.facedetect", prop, "1");
9585 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9586 LOGD("Support face detection mode: %d",
9587 supportedFaceDetectMode);
9588
9589 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009590 /* supported mode should be OFF if the max number of faces is 0 */
9591 if (maxFaces <= 0) {
9592 supportedFaceDetectMode = 0;
9593 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009594 Vector<uint8_t> availableFaceDetectModes;
9595 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9596 if (supportedFaceDetectMode == 1) {
9597 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9598 } else if (supportedFaceDetectMode == 2) {
9599 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9600 } else if (supportedFaceDetectMode == 3) {
9601 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9602 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9603 } else {
9604 maxFaces = 0;
9605 }
9606 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9607 availableFaceDetectModes.array(),
9608 availableFaceDetectModes.size());
9609 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9610 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009611 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9612 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9613 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009614
9615 int32_t exposureCompensationRange[] = {
9616 gCamCapability[cameraId]->exposure_compensation_min,
9617 gCamCapability[cameraId]->exposure_compensation_max};
9618 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9619 exposureCompensationRange,
9620 sizeof(exposureCompensationRange)/sizeof(int32_t));
9621
9622 uint8_t lensFacing = (facingBack) ?
9623 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9624 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9625
9626 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9627 available_thumbnail_sizes,
9628 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9629
9630 /* all sizes will be combined into this tag */
9631 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9632 /*android.scaler.availableStreamConfigurations*/
9633 Vector<int32_t> available_stream_configs;
9634 cam_dimension_t active_array_dim;
9635 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9636 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009637
9638 /* Advertise the list of supported input dimensions based on the property below.
9639 By default all sizes up to 5MP will be advertised.
9640 Note that the setprop resolution format should be WxH,
9641 e.g.: adb shell setprop persist.camera.input.minsize 1280x720
9642 To list all supported sizes, set the property to "0x0". */
9643 cam_dimension_t minInputSize = {2592,1944}; //5MP
9644 memset(prop, 0, sizeof(prop));
9645 property_get("persist.camera.input.minsize", prop, "2592x1944");
9646 if (strlen(prop) > 0) {
9647 char *saveptr = NULL;
9648 char *token = strtok_r(prop, "x", &saveptr);
9649 if (token != NULL) {
9650 minInputSize.width = atoi(token);
9651 }
9652 token = strtok_r(NULL, "x", &saveptr);
9653 if (token != NULL) {
9654 minInputSize.height = atoi(token);
9655 }
9656 }
9657
Thierry Strudel3d639192016-09-09 11:52:26 -07009658 /* Add input/output stream configurations for each scalar formats*/
9659 for (size_t j = 0; j < scalar_formats_count; j++) {
9660 switch (scalar_formats[j]) {
9661 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9662 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9663 case HAL_PIXEL_FORMAT_RAW10:
9664 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9665 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9666 addStreamConfig(available_stream_configs, scalar_formats[j],
9667 gCamCapability[cameraId]->raw_dim[i],
9668 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9669 }
9670 break;
9671 case HAL_PIXEL_FORMAT_BLOB:
9672 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9673 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9674 addStreamConfig(available_stream_configs, scalar_formats[j],
9675 gCamCapability[cameraId]->picture_sizes_tbl[i],
9676 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9677 }
9678 break;
9679 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9680 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9681 default:
9682 cam_dimension_t largest_picture_size;
9683 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9684 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9685 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9686 addStreamConfig(available_stream_configs, scalar_formats[j],
9687 gCamCapability[cameraId]->picture_sizes_tbl[i],
9688 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009689 /* For the below 2 formats we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009690 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9691 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009692 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9693 >= minInputSize.width) || (gCamCapability[cameraId]->
9694 picture_sizes_tbl[i].height >= minInputSize.height)) {
9695 addStreamConfig(available_stream_configs, scalar_formats[j],
9696 gCamCapability[cameraId]->picture_sizes_tbl[i],
9697 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9698 }
9699 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009700 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009701
Thierry Strudel3d639192016-09-09 11:52:26 -07009702 break;
9703 }
9704 }
9705
9706 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9707 available_stream_configs.array(), available_stream_configs.size());
9708 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9709 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9710
9711 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9712 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9713
9714 /* android.scaler.availableMinFrameDurations */
9715 Vector<int64_t> available_min_durations;
9716 for (size_t j = 0; j < scalar_formats_count; j++) {
9717 switch (scalar_formats[j]) {
9718 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9719 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9720 case HAL_PIXEL_FORMAT_RAW10:
9721 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9722 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9723 available_min_durations.add(scalar_formats[j]);
9724 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9725 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9726 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9727 }
9728 break;
9729 default:
9730 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9731 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9732 available_min_durations.add(scalar_formats[j]);
9733 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9734 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9735 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9736 }
9737 break;
9738 }
9739 }
9740 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9741 available_min_durations.array(), available_min_durations.size());
9742
9743 Vector<int32_t> available_hfr_configs;
9744 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9745 int32_t fps = 0;
9746 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9747 case CAM_HFR_MODE_60FPS:
9748 fps = 60;
9749 break;
9750 case CAM_HFR_MODE_90FPS:
9751 fps = 90;
9752 break;
9753 case CAM_HFR_MODE_120FPS:
9754 fps = 120;
9755 break;
9756 case CAM_HFR_MODE_150FPS:
9757 fps = 150;
9758 break;
9759 case CAM_HFR_MODE_180FPS:
9760 fps = 180;
9761 break;
9762 case CAM_HFR_MODE_210FPS:
9763 fps = 210;
9764 break;
9765 case CAM_HFR_MODE_240FPS:
9766 fps = 240;
9767 break;
9768 case CAM_HFR_MODE_480FPS:
9769 fps = 480;
9770 break;
9771 case CAM_HFR_MODE_OFF:
9772 case CAM_HFR_MODE_MAX:
9773 default:
9774 break;
9775 }
9776
9777 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9778 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9779 /* For each HFR frame rate, need to advertise one variable fps range
9780 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9781 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9782 * set by the app. When video recording is started, [120, 120] is
9783 * set. This way sensor configuration does not change when recording
9784 * is started */
9785
9786 /* (width, height, fps_min, fps_max, batch_size_max) */
9787 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9788 j < MAX_SIZES_CNT; j++) {
9789 available_hfr_configs.add(
9790 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9791 available_hfr_configs.add(
9792 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9793 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9794 available_hfr_configs.add(fps);
9795 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9796
9797 /* (width, height, fps_min, fps_max, batch_size_max) */
9798 available_hfr_configs.add(
9799 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9800 available_hfr_configs.add(
9801 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9802 available_hfr_configs.add(fps);
9803 available_hfr_configs.add(fps);
9804 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9805 }
9806 }
9807 }
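// Illustrative outcome (dimension and PREVIEW_FPS_FOR_HFR = 30 assumed): a
// 120 fps HFR entry at 1920x1080 adds the tuples (1920, 1080, 30, 120, 4)
// and (1920, 1080, 120, 120, 4) above, i.e. one variable and one fixed fps
// range, each with batch_size_max = 120 / 30 = 4.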
9808 //Advertise HFR capability only if the property is set
9809 memset(prop, 0, sizeof(prop));
9810 property_get("persist.camera.hal3hfr.enable", prop, "1");
9811 uint8_t hfrEnable = (uint8_t)atoi(prop);
9812
9813 if(hfrEnable && available_hfr_configs.array()) {
9814 staticInfo.update(
9815 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9816 available_hfr_configs.array(), available_hfr_configs.size());
9817 }
9818
9819 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9820 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9821 &max_jpeg_size, 1);
9822
9823 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9824 size_t size = 0;
9825 count = CAM_EFFECT_MODE_MAX;
9826 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9827 for (size_t i = 0; i < count; i++) {
9828 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9829 gCamCapability[cameraId]->supported_effects[i]);
9830 if (NAME_NOT_FOUND != val) {
9831 avail_effects[size] = (uint8_t)val;
9832 size++;
9833 }
9834 }
9835 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9836 avail_effects,
9837 size);
9838
9839 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9840 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9841 size_t supported_scene_modes_cnt = 0;
9842 count = CAM_SCENE_MODE_MAX;
9843 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9844 for (size_t i = 0; i < count; i++) {
9845 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9846 CAM_SCENE_MODE_OFF) {
9847 int val = lookupFwkName(SCENE_MODES_MAP,
9848 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9849 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009850
Thierry Strudel3d639192016-09-09 11:52:26 -07009851 if (NAME_NOT_FOUND != val) {
9852 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9853 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9854 supported_scene_modes_cnt++;
9855 }
9856 }
9857 }
9858 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9859 avail_scene_modes,
9860 supported_scene_modes_cnt);
9861
9862 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9863 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9864 supported_scene_modes_cnt,
9865 CAM_SCENE_MODE_MAX,
9866 scene_mode_overrides,
9867 supported_indexes,
9868 cameraId);
9869
9870 if (supported_scene_modes_cnt == 0) {
9871 supported_scene_modes_cnt = 1;
9872 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9873 }
9874
9875 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9876 scene_mode_overrides, supported_scene_modes_cnt * 3);
9877
9878 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9879 ANDROID_CONTROL_MODE_AUTO,
9880 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9881 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9882 available_control_modes,
9883 3);
9884
9885 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9886 size = 0;
9887 count = CAM_ANTIBANDING_MODE_MAX;
9888 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9889 for (size_t i = 0; i < count; i++) {
9890 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9891 gCamCapability[cameraId]->supported_antibandings[i]);
9892 if (NAME_NOT_FOUND != val) {
9893 avail_antibanding_modes[size] = (uint8_t)val;
9894 size++;
9895 }
9896
9897 }
9898 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9899 avail_antibanding_modes,
9900 size);
9901
9902 uint8_t avail_abberation_modes[] = {
9903 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9904 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9905 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9906 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9907 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9908 if (0 == count) {
9909 // If no aberration correction modes are available for a device, this advertise OFF mode
9910 size = 1;
9911 } else {
9912 // If count is not zero then atleast one among the FAST or HIGH quality is supported
9913 // So, advertize all 3 modes if atleast any one mode is supported as per the
9914 // new M requirement
9915 size = 3;
9916 }
9917 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
9918 avail_abberation_modes,
9919 size);
9920
9921 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
9922 size = 0;
9923 count = CAM_FOCUS_MODE_MAX;
9924 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
9925 for (size_t i = 0; i < count; i++) {
9926 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
9927 gCamCapability[cameraId]->supported_focus_modes[i]);
9928 if (NAME_NOT_FOUND != val) {
9929 avail_af_modes[size] = (uint8_t)val;
9930 size++;
9931 }
9932 }
9933 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
9934 avail_af_modes,
9935 size);
9936
9937 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
9938 size = 0;
9939 count = CAM_WB_MODE_MAX;
9940 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
9941 for (size_t i = 0; i < count; i++) {
9942 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
9943 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
9944 gCamCapability[cameraId]->supported_white_balances[i]);
9945 if (NAME_NOT_FOUND != val) {
9946 avail_awb_modes[size] = (uint8_t)val;
9947 size++;
9948 }
9949 }
9950 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
9951 avail_awb_modes,
9952 size);
9953
9954 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
9955 count = CAM_FLASH_FIRING_LEVEL_MAX;
9956 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
9957 count);
9958 for (size_t i = 0; i < count; i++) {
9959 available_flash_levels[i] =
9960 gCamCapability[cameraId]->supported_firing_levels[i];
9961 }
9962 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
9963 available_flash_levels, count);
9964
9965 uint8_t flashAvailable;
9966 if (gCamCapability[cameraId]->flash_available)
9967 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
9968 else
9969 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
9970 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
9971 &flashAvailable, 1);
9972
9973 Vector<uint8_t> avail_ae_modes;
9974 count = CAM_AE_MODE_MAX;
9975 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
9976 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08009977 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
9978 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
9979 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
9980 }
9981 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07009982 }
9983 if (flashAvailable) {
9984 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
9985 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
9986 }
9987 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
9988 avail_ae_modes.array(),
9989 avail_ae_modes.size());
9990
9991 int32_t sensitivity_range[2];
9992 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
9993 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
9994 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
9995 sensitivity_range,
9996 sizeof(sensitivity_range) / sizeof(int32_t));
9997
9998 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
9999 &gCamCapability[cameraId]->max_analog_sensitivity,
10000 1);
10001
10002 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10003 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10004 &sensor_orientation,
10005 1);
10006
10007 int32_t max_output_streams[] = {
10008 MAX_STALLING_STREAMS,
10009 MAX_PROCESSED_STREAMS,
10010 MAX_RAW_STREAMS};
10011 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10012 max_output_streams,
10013 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10014
10015 uint8_t avail_leds = 0;
10016 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10017 &avail_leds, 0);
10018
10019 uint8_t focus_dist_calibrated;
10020 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10021 gCamCapability[cameraId]->focus_dist_calibrated);
10022 if (NAME_NOT_FOUND != val) {
10023 focus_dist_calibrated = (uint8_t)val;
10024 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10025 &focus_dist_calibrated, 1);
10026 }
10027
10028 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10029 size = 0;
10030 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10031 MAX_TEST_PATTERN_CNT);
10032 for (size_t i = 0; i < count; i++) {
10033 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10034 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10035 if (NAME_NOT_FOUND != testpatternMode) {
10036 avail_testpattern_modes[size] = testpatternMode;
10037 size++;
10038 }
10039 }
10040 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10041 avail_testpattern_modes,
10042 size);
10043
10044 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10045 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10046 &max_pipeline_depth,
10047 1);
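// For instance, assuming MAX_INFLIGHT_REQUESTS = 6, EMPTY_PIPELINE_DELAY = 2
// and FRAME_SKIP_DELAY = 0 (values defined elsewhere, not shown here), the
// advertised ANDROID_REQUEST_PIPELINE_MAX_DEPTH would be 6 + 2 + 0 = 8.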
10048
10049 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10050 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10051 &partial_result_count,
10052 1);
10053
10054 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10055 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10056
10057 Vector<uint8_t> available_capabilities;
10058 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10059 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10060 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10061 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10062 if (supportBurst) {
10063 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10064 }
10065 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10066 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10067 if (hfrEnable && available_hfr_configs.array()) {
10068 available_capabilities.add(
10069 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10070 }
10071
10072 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10073 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10074 }
10075 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10076 available_capabilities.array(),
10077 available_capabilities.size());
10078
10079 //aeLockAvailable is set to true if the capabilities include MANUAL_SENSOR or BURST_CAPTURE.
10080 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10081 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10082 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10083
10084 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10085 &aeLockAvailable, 1);
10086
10087 //awbLockAvailable is set to true if the capabilities include MANUAL_POST_PROCESSING or
10088 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10089 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10090 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10091
10092 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10093 &awbLockAvailable, 1);
10094
10095 int32_t max_input_streams = 1;
10096 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10097 &max_input_streams,
10098 1);
10099
10100 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10101 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10102 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10103 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10104 HAL_PIXEL_FORMAT_YCbCr_420_888};
10105 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10106 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10107
10108 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10109 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10110 &max_latency,
10111 1);
10112
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010113#ifndef USE_HAL_3_3
10114 int32_t isp_sensitivity_range[2];
10115 isp_sensitivity_range[0] =
10116 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10117 isp_sensitivity_range[1] =
10118 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10119 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10120 isp_sensitivity_range,
10121 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10122#endif
10123
Thierry Strudel3d639192016-09-09 11:52:26 -070010124 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10125 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10126 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10127 available_hot_pixel_modes,
10128 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10129
10130 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10131 ANDROID_SHADING_MODE_FAST,
10132 ANDROID_SHADING_MODE_HIGH_QUALITY};
10133 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10134 available_shading_modes,
10135 3);
10136
10137 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10138 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10139 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10140 available_lens_shading_map_modes,
10141 2);
10142
10143 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10144 ANDROID_EDGE_MODE_FAST,
10145 ANDROID_EDGE_MODE_HIGH_QUALITY,
10146 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10147 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10148 available_edge_modes,
10149 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10150
10151 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10152 ANDROID_NOISE_REDUCTION_MODE_FAST,
10153 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10154 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10155 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10156 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10157 available_noise_red_modes,
10158 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10159
10160 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10161 ANDROID_TONEMAP_MODE_FAST,
10162 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10163 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10164 available_tonemap_modes,
10165 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10166
10167 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10168 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10169 available_hot_pixel_map_modes,
10170 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10171
10172 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10173 gCamCapability[cameraId]->reference_illuminant1);
10174 if (NAME_NOT_FOUND != val) {
10175 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10176 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10177 }
10178
10179 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10180 gCamCapability[cameraId]->reference_illuminant2);
10181 if (NAME_NOT_FOUND != val) {
10182 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10183 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10184 }
10185
10186 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10187 (void *)gCamCapability[cameraId]->forward_matrix1,
10188 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10189
10190 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10191 (void *)gCamCapability[cameraId]->forward_matrix2,
10192 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10193
10194 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10195 (void *)gCamCapability[cameraId]->color_transform1,
10196 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10197
10198 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10199 (void *)gCamCapability[cameraId]->color_transform2,
10200 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10201
10202 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10203 (void *)gCamCapability[cameraId]->calibration_transform1,
10204 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10205
10206 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10207 (void *)gCamCapability[cameraId]->calibration_transform2,
10208 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10209
10210 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10211 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10212 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10213 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10214 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10215 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10216 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10217 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10218 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10219 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10220 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10221 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10222 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10223 ANDROID_JPEG_GPS_COORDINATES,
10224 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10225 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10226 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10227 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10228 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10229 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10230 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10231 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10232 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10233 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010234#ifndef USE_HAL_3_3
10235 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10236#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010237 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010238 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010239 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10240 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010241 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010242 /* DevCamDebug metadata request_keys_basic */
10243 DEVCAMDEBUG_META_ENABLE,
10244 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010245 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010246 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010247 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010248 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010249 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010250 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010251
10252 size_t request_keys_cnt =
10253 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10254 Vector<int32_t> available_request_keys;
10255 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10256 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10257 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10258 }
10259
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010260 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010261 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010262 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010263 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010264 }
10265
Thierry Strudel3d639192016-09-09 11:52:26 -070010266 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10267 available_request_keys.array(), available_request_keys.size());
10268
10269 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10270 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10271 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10272 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10273 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10274 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10275 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10276 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10277 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10278 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10279 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10280 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10281 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10282 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10283 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10284 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10285 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010286 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010287 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10288 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10289 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010290 ANDROID_STATISTICS_FACE_SCORES,
10291#ifndef USE_HAL_3_3
10292 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10293#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010294 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010295 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010296 // DevCamDebug metadata result_keys_basic
10297 DEVCAMDEBUG_META_ENABLE,
10298 // DevCamDebug metadata result_keys AF
10299 DEVCAMDEBUG_AF_LENS_POSITION,
10300 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10301 DEVCAMDEBUG_AF_TOF_DISTANCE,
10302 DEVCAMDEBUG_AF_LUMA,
10303 DEVCAMDEBUG_AF_HAF_STATE,
10304 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10305 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10306 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10307 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10308 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10309 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10310 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10311 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10312 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10313 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10314 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10315 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10316 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10317 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10318 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10319 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10320 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10321 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10322 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10323 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10324 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10325 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10326 // DevCamDebug metadata result_keys AEC
10327 DEVCAMDEBUG_AEC_TARGET_LUMA,
10328 DEVCAMDEBUG_AEC_COMP_LUMA,
10329 DEVCAMDEBUG_AEC_AVG_LUMA,
10330 DEVCAMDEBUG_AEC_CUR_LUMA,
10331 DEVCAMDEBUG_AEC_LINECOUNT,
10332 DEVCAMDEBUG_AEC_REAL_GAIN,
10333 DEVCAMDEBUG_AEC_EXP_INDEX,
10334 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010335 // DevCamDebug metadata result_keys zzHDR
10336 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10337 DEVCAMDEBUG_AEC_L_LINECOUNT,
10338 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10339 DEVCAMDEBUG_AEC_S_LINECOUNT,
10340 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10341 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10342 // DevCamDebug metadata result_keys ADRC
10343 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10344 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10345 DEVCAMDEBUG_AEC_GTM_RATIO,
10346 DEVCAMDEBUG_AEC_LTM_RATIO,
10347 DEVCAMDEBUG_AEC_LA_RATIO,
10348 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010349 // DevCamDebug metadata result_keys AEC MOTION
10350 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10351 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10352 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010353 // DevCamDebug metadata result_keys AWB
10354 DEVCAMDEBUG_AWB_R_GAIN,
10355 DEVCAMDEBUG_AWB_G_GAIN,
10356 DEVCAMDEBUG_AWB_B_GAIN,
10357 DEVCAMDEBUG_AWB_CCT,
10358 DEVCAMDEBUG_AWB_DECISION,
10359 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010360 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10361 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10362 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010363 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010364 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010365 };
10366
Thierry Strudel3d639192016-09-09 11:52:26 -070010367 size_t result_keys_cnt =
10368 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10369
10370 Vector<int32_t> available_result_keys;
10371 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10372 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10373 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10374 }
10375 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10376 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10377 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10378 }
10379 if (supportedFaceDetectMode == 1) {
10380 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10381 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10382 } else if ((supportedFaceDetectMode == 2) ||
10383 (supportedFaceDetectMode == 3)) {
10384 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10385 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10386 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010387#ifndef USE_HAL_3_3
10388 if (hasBlackRegions) {
10389 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10390 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10391 }
10392#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010393
10394 if (gExposeEnableZslKey) {
10395 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010396 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010397 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10398 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010399 }
10400
Thierry Strudel3d639192016-09-09 11:52:26 -070010401 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10402 available_result_keys.array(), available_result_keys.size());
10403
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010404 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010405 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10406 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10407 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10408 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10409 ANDROID_SCALER_CROPPING_TYPE,
10410 ANDROID_SYNC_MAX_LATENCY,
10411 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10412 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10413 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10414 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10415 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10416 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10417 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10418 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10419 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10420 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10421 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10422 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10423 ANDROID_LENS_FACING,
10424 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10425 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10426 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10427 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10428 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10429 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10430 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10431 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10432 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10433 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10434 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10435 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10436 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10437 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10438 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10439 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10440 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10441 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10442 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10443 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010444 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010445 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10446 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10447 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10448 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10449 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10450 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10451 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10452 ANDROID_CONTROL_AVAILABLE_MODES,
10453 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10454 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10455 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10456 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010457 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10458#ifndef USE_HAL_3_3
10459 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10460 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10461#endif
10462 };
10463
10464 Vector<int32_t> available_characteristics_keys;
10465 available_characteristics_keys.appendArray(characteristics_keys_basic,
10466 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10467#ifndef USE_HAL_3_3
10468 if (hasBlackRegions) {
10469 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10470 }
10471#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010472
10473 if (0 <= indexPD) {
10474 int32_t depthKeys[] = {
10475 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10476 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10477 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10478 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10479 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10480 };
10481 available_characteristics_keys.appendArray(depthKeys,
10482 sizeof(depthKeys) / sizeof(depthKeys[0]));
10483 }
10484
Thierry Strudel3d639192016-09-09 11:52:26 -070010485 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010486 available_characteristics_keys.array(),
10487 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010488
10489 /*available stall durations depend on the hw + sw and will be different for different devices */
10490 /*have to add for raw after implementation*/
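    /* Each ANDROID_SCALER_AVAILABLE_STALL_DURATIONS entry appended below is a
     * 4-tuple of (format, width, height, stall duration in ns), so the vector
     * grows in groups of four values per advertised size. */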
10491 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10492 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10493
10494 Vector<int64_t> available_stall_durations;
10495 for (uint32_t j = 0; j < stall_formats_count; j++) {
10496 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10497 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10498 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10499 available_stall_durations.add(stall_formats[j]);
10500 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10501 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10502 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10503 }
10504 } else {
10505 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10506 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10507 available_stall_durations.add(stall_formats[j]);
10508 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10509 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10510 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10511 }
10512 }
10513 }
10514 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10515 available_stall_durations.array(),
10516 available_stall_durations.size());
10517
10518 //QCAMERA3_OPAQUE_RAW
10519 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10520 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10521 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10522 case LEGACY_RAW:
10523 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10524 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10525 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10526 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10527 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10528 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10529 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10530 break;
10531 case MIPI_RAW:
10532 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10533 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10534 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10535 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10536 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10537 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10538 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10539 break;
10540 default:
10541 LOGE("unknown opaque_raw_format %d",
10542 gCamCapability[cameraId]->opaque_raw_fmt);
10543 break;
10544 }
10545 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10546
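    /* QCAMERA3_OPAQUE_RAW_STRIDES is advertised as (width, height, stride)
     * triples, one per supported raw dimension; the stride comes from the
     * first plane reported by mm_stream_calc_offset_raw(). */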
10547 Vector<int32_t> strides;
10548 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10549 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10550 cam_stream_buf_plane_info_t buf_planes;
10551 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10552 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10553 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10554 &gCamCapability[cameraId]->padding_info, &buf_planes);
10555 strides.add(buf_planes.plane_info.mp[0].stride);
10556 }
10557 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10558 strides.size());
10559
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010560 //TBD: remove the following line once backend advertises zzHDR in feature mask
10561 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010562 //Video HDR default
10563 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10564 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010565 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010566 int32_t vhdr_mode[] = {
10567 QCAMERA3_VIDEO_HDR_MODE_OFF,
10568 QCAMERA3_VIDEO_HDR_MODE_ON};
10569
10570 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10571 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10572 vhdr_mode, vhdr_mode_count);
10573 }
10574
Thierry Strudel3d639192016-09-09 11:52:26 -070010575 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10576 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10577 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10578
10579 uint8_t isMonoOnly =
10580 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10581 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10582 &isMonoOnly, 1);
10583
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010584#ifndef USE_HAL_3_3
10585 Vector<int32_t> opaque_size;
10586 for (size_t j = 0; j < scalar_formats_count; j++) {
10587 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10588 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10589 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10590 cam_stream_buf_plane_info_t buf_planes;
10591
10592 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10593 &gCamCapability[cameraId]->padding_info, &buf_planes);
10594
10595 if (rc == 0) {
10596 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10597 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10598 opaque_size.add(buf_planes.plane_info.frame_len);
} else {
10600 LOGE("raw frame calculation failed!");
10601 }
10602 }
10603 }
10604 }
10605
10606 if ((opaque_size.size() > 0) &&
10607 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10608 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10609 else
10610 LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation(2 bytes/pixel)");
10611#endif
10612
Thierry Strudel04e026f2016-10-10 11:27:36 -070010613 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10614 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10615 size = 0;
10616 count = CAM_IR_MODE_MAX;
10617 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10618 for (size_t i = 0; i < count; i++) {
10619 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10620 gCamCapability[cameraId]->supported_ir_modes[i]);
10621 if (NAME_NOT_FOUND != val) {
10622 avail_ir_modes[size] = (int32_t)val;
10623 size++;
10624 }
10625 }
10626 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10627 avail_ir_modes, size);
10628 }
10629
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010630 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10631 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10632 size = 0;
10633 count = CAM_AEC_CONVERGENCE_MAX;
10634 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10635 for (size_t i = 0; i < count; i++) {
10636 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10637 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10638 if (NAME_NOT_FOUND != val) {
10639 available_instant_aec_modes[size] = (int32_t)val;
10640 size++;
10641 }
10642 }
10643 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10644 available_instant_aec_modes, size);
10645 }
10646
Thierry Strudel54dc9782017-02-15 12:12:10 -080010647 int32_t sharpness_range[] = {
10648 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10649 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10650 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10651
10652 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10653 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10654 size = 0;
10655 count = CAM_BINNING_CORRECTION_MODE_MAX;
10656 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10657 for (size_t i = 0; i < count; i++) {
10658 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10659 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10660 gCamCapability[cameraId]->supported_binning_modes[i]);
10661 if (NAME_NOT_FOUND != val) {
10662 avail_binning_modes[size] = (int32_t)val;
10663 size++;
10664 }
10665 }
10666 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10667 avail_binning_modes, size);
10668 }
10669
10670 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10671 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10672 size = 0;
10673 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10674 for (size_t i = 0; i < count; i++) {
10675 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10676 gCamCapability[cameraId]->supported_aec_modes[i]);
10677 if (NAME_NOT_FOUND != val)
10678 available_aec_modes[size++] = val;
10679 }
10680 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10681 available_aec_modes, size);
10682 }
10683
10684 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10685 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10686 size = 0;
10687 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10688 for (size_t i = 0; i < count; i++) {
10689 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10690 gCamCapability[cameraId]->supported_iso_modes[i]);
10691 if (NAME_NOT_FOUND != val)
10692 available_iso_modes[size++] = val;
10693 }
10694 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10695 available_iso_modes, size);
10696 }
10697
10698 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010699 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010700 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10701 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10702 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10703
10704 int32_t available_saturation_range[4];
10705 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10706 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10707 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10708 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10709 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10710 available_saturation_range, 4);
10711
10712 uint8_t is_hdr_values[2];
10713 is_hdr_values[0] = 0;
10714 is_hdr_values[1] = 1;
10715 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10716 is_hdr_values, 2);
10717
10718 float is_hdr_confidence_range[2];
10719 is_hdr_confidence_range[0] = 0.0;
10720 is_hdr_confidence_range[1] = 1.0;
10721 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10722 is_hdr_confidence_range, 2);
10723
Emilian Peev0a972ef2017-03-16 10:25:53 +000010724 size_t eepromLength = strnlen(
10725 reinterpret_cast<const char *>(
10726 gCamCapability[cameraId]->eeprom_version_info),
10727 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10728 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010729 char easelInfo[] = ",E:N";
10730 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10731 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10732 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010733 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010734 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010735 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010736 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010737 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10738 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10739 }
10740
Thierry Strudel3d639192016-09-09 11:52:26 -070010741 gStaticMetadata[cameraId] = staticInfo.release();
10742 return rc;
10743}
10744
10745/*===========================================================================
10746 * FUNCTION : makeTable
10747 *
10748 * DESCRIPTION: make a table of sizes
10749 *
 * PARAMETERS :
 *   @dimTable  : array of dimensions to flatten
 *   @size      : number of valid entries in dimTable
 *   @max_size  : maximum number of entries to copy
 *   @sizeTable : output array of interleaved width/height values
 *
10753 *==========================================================================*/
10754void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10755 size_t max_size, int32_t *sizeTable)
10756{
10757 size_t j = 0;
10758 if (size > max_size) {
10759 size = max_size;
10760 }
10761 for (size_t i = 0; i < size; i++) {
10762 sizeTable[j] = dimTable[i].width;
10763 sizeTable[j+1] = dimTable[i].height;
10764 j+=2;
10765 }
10766}
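// For example, a dimTable of {4032x3024, 1920x1080} is flattened into
// sizeTable = {4032, 3024, 1920, 1080}, the interleaved width/height layout
// used by the size tags above.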
10767
10768/*===========================================================================
10769 * FUNCTION : makeFPSTable
10770 *
10771 * DESCRIPTION: make a table of fps ranges
10772 *
 * PARAMETERS :
 *   @fpsTable       : array of fps ranges to flatten
 *   @size           : number of valid entries in fpsTable
 *   @max_size       : maximum number of entries to copy
 *   @fpsRangesTable : output array of interleaved min/max fps values
 *
10775 *==========================================================================*/
10776void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10777 size_t max_size, int32_t *fpsRangesTable)
10778{
10779 size_t j = 0;
10780 if (size > max_size) {
10781 size = max_size;
10782 }
10783 for (size_t i = 0; i < size; i++) {
10784 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10785 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10786 j+=2;
10787 }
10788}
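// For example, fps ranges {15.0-30.0, 30.0-30.0} become
// fpsRangesTable = {15, 30, 30, 30}; fractional fps values are truncated by
// the int32_t cast.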
10789
10790/*===========================================================================
10791 * FUNCTION : makeOverridesList
10792 *
10793 * DESCRIPTION: make a list of scene mode overrides
10794 *
 * PARAMETERS :
 *   @overridesTable    : scene mode override table from the backend
 *   @size              : number of valid entries in overridesTable
 *   @max_size          : maximum number of entries to process
 *   @overridesList     : output list of (AE, AWB, AF) override triples
 *   @supported_indexes : indexes of the scene modes supported by the framework
 *   @camera_id         : camera Id
 *
10798 *==========================================================================*/
10799void QCamera3HardwareInterface::makeOverridesList(
10800 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10801 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10802{
    /* The daemon gives a list of overrides for all scene modes.
     * However, we should send the framework only the overrides for the
     * scene modes it supports. */
10806 size_t j = 0;
10807 if (size > max_size) {
10808 size = max_size;
10809 }
10810 size_t focus_count = CAM_FOCUS_MODE_MAX;
10811 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10812 focus_count);
10813 for (size_t i = 0; i < size; i++) {
10814 bool supt = false;
10815 size_t index = supported_indexes[i];
10816 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10817 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10818 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10819 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10820 overridesTable[index].awb_mode);
10821 if (NAME_NOT_FOUND != val) {
10822 overridesList[j+1] = (uint8_t)val;
10823 }
10824 uint8_t focus_override = overridesTable[index].af_mode;
10825 for (size_t k = 0; k < focus_count; k++) {
10826 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10827 supt = true;
10828 break;
10829 }
10830 }
10831 if (supt) {
10832 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10833 focus_override);
10834 if (NAME_NOT_FOUND != val) {
10835 overridesList[j+2] = (uint8_t)val;
10836 }
10837 } else {
10838 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10839 }
10840 j+=3;
10841 }
10842}
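// Each override is emitted as an (AE, AWB, AF) triple, so overridesList must
// hold at least 3 * size entries; AE defaults to ON_AUTO_FLASH only when a
// flash is available, and AF falls back to OFF when the backend override is
// not in the supported focus mode list.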
10843
10844/*===========================================================================
10845 * FUNCTION : filterJpegSizes
10846 *
 * DESCRIPTION: Returns the supported JPEG sizes, keeping only the processed
 *              sizes that the active array can be downscaled to within the
 *              given downscale factor
 *
 * PARAMETERS :
 *   @jpegSizes         : output array of interleaved width/height values
 *   @processedSizes    : interleaved width/height values of the processed sizes
 *   @processedSizesCnt : number of int32_t entries in processedSizes
 *   @maxCount          : maximum number of entries to process
 *   @active_array_size : active pixel array dimensions
 *   @downscale_factor  : maximum supported downscale factor (0 is treated as 1)
 *
10852 * RETURN : length of jpegSizes array
10853 *==========================================================================*/
10854
10855size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10856 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10857 uint8_t downscale_factor)
10858{
10859 if (0 == downscale_factor) {
10860 downscale_factor = 1;
10861 }
10862
10863 int32_t min_width = active_array_size.width / downscale_factor;
10864 int32_t min_height = active_array_size.height / downscale_factor;
10865 size_t jpegSizesCnt = 0;
10866 if (processedSizesCnt > maxCount) {
10867 processedSizesCnt = maxCount;
10868 }
10869 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10870 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10871 jpegSizes[jpegSizesCnt] = processedSizes[i];
10872 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10873 jpegSizesCnt += 2;
10874 }
10875 }
10876 return jpegSizesCnt;
10877}
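// For example, with a 4032x3024 active array and downscale_factor = 4, only
// processed sizes of at least 1008x756 survive as JPEG sizes; the return value
// counts int32_t entries, i.e. two per accepted size.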
10878
10879/*===========================================================================
10880 * FUNCTION : computeNoiseModelEntryS
10881 *
10882 * DESCRIPTION: function to map a given sensitivity to the S noise
10883 * model parameters in the DNG noise model.
10884 *
10885 * PARAMETERS : sens : the sensor sensitivity
10886 *
 * RETURN : S (sensor amplification) noise
10888 *
10889 *==========================================================================*/
10890double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10891 double s = gCamCapability[mCameraId]->gradient_S * sens +
10892 gCamCapability[mCameraId]->offset_S;
10893 return ((s < 0.0) ? 0.0 : s);
10894}
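// Worked example with hypothetical calibration values: for gradient_S = 3.0e-06
// and offset_S = 4.0e-06, computeNoiseModelEntryS(100) = 3.0e-06 * 100 +
// 4.0e-06 = 3.04e-04; negative results are clamped to 0.0.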
10895
10896/*===========================================================================
10897 * FUNCTION : computeNoiseModelEntryO
10898 *
10899 * DESCRIPTION: function to map a given sensitivity to the O noise
10900 * model parameters in the DNG noise model.
10901 *
10902 * PARAMETERS : sens : the sensor sensitivity
10903 *
 * RETURN : O (sensor readout) noise
10905 *
10906 *==========================================================================*/
10907double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10908 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10909 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10910 1.0 : (1.0 * sens / max_analog_sens);
10911 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10912 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10913 return ((o < 0.0) ? 0.0 : o);
10914}
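// Worked example with hypothetical calibration values: if
// max_analog_sensitivity = 800, then sens = 1600 gives digital_gain = 2.0 and
// O = gradient_O * 1600 * 1600 + offset_O * 4.0; below the analog limit the
// digital_gain term stays at 1.0, so O grows with sens^2 only.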
10915
10916/*===========================================================================
10917 * FUNCTION : getSensorSensitivity
10918 *
10919 * DESCRIPTION: convert iso_mode to an integer value
10920 *
10921 * PARAMETERS : iso_mode : the iso_mode supported by sensor
10922 *
 * RETURN : sensitivity supported by sensor
10924 *
10925 *==========================================================================*/
10926int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
10927{
10928 int32_t sensitivity;
10929
10930 switch (iso_mode) {
10931 case CAM_ISO_MODE_100:
10932 sensitivity = 100;
10933 break;
10934 case CAM_ISO_MODE_200:
10935 sensitivity = 200;
10936 break;
10937 case CAM_ISO_MODE_400:
10938 sensitivity = 400;
10939 break;
10940 case CAM_ISO_MODE_800:
10941 sensitivity = 800;
10942 break;
10943 case CAM_ISO_MODE_1600:
10944 sensitivity = 1600;
10945 break;
10946 default:
10947 sensitivity = -1;
10948 break;
10949 }
10950 return sensitivity;
10951}
10952
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010953int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010954 if (gEaselManagerClient == nullptr) {
10955 gEaselManagerClient = EaselManagerClient::create();
10956 if (gEaselManagerClient == nullptr) {
10957 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
10958 return -ENODEV;
10959 }
10960 }
10961
10962 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010963 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
10964 // to connect to Easel.
10965 bool doNotpowerOnEasel =
10966 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
10967
10968 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070010969 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
10970 return OK;
10971 }
10972
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010973 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010974 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010975 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010976 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010977 return res;
10978 }
10979
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010980 EaselManagerClientOpened = true;
10981
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010982 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010983 if (res != OK) {
10984 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
10985 }
10986
Chien-Yu Chen4d752e32017-06-07 12:13:24 -070010987 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", true);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070010988 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010989
10990 // Expose enableZsl key only when HDR+ mode is enabled.
10991 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080010992 }
10993
10994 return OK;
10995}
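// Note: the properties consulted above default to leaving HDR+ available:
// camera.hdrplus.donotpoweroneasel defaults to false,
// persist.camera.hdrplus.enable defaults to true, and
// persist.camera.hdrplus.profiling defaults to false. The
// ANDROID_CONTROL_ENABLE_ZSL key is only exposed when HDR+ is enabled
// (gExposeEnableZslKey).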
10996
Thierry Strudel3d639192016-09-09 11:52:26 -070010997/*===========================================================================
10998 * FUNCTION : getCamInfo
10999 *
11000 * DESCRIPTION: query camera capabilities
11001 *
11002 * PARAMETERS :
11003 * @cameraId : camera Id
11004 * @info : camera info struct to be filled in with camera capabilities
11005 *
11006 * RETURN : int type of status
11007 * NO_ERROR -- success
11008 * none-zero failure code
11009 *==========================================================================*/
11010int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11011 struct camera_info *info)
11012{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011013 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011014 int rc = 0;
11015
11016 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011017
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011018 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011019 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011020 rc = initHdrPlusClientLocked();
11021 if (rc != OK) {
11022 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11023 pthread_mutex_unlock(&gCamLock);
11024 return rc;
11025 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011026 }
11027
Thierry Strudel3d639192016-09-09 11:52:26 -070011028 if (NULL == gCamCapability[cameraId]) {
11029 rc = initCapabilities(cameraId);
11030 if (rc < 0) {
11031 pthread_mutex_unlock(&gCamLock);
11032 return rc;
11033 }
11034 }
11035
11036 if (NULL == gStaticMetadata[cameraId]) {
11037 rc = initStaticMetadata(cameraId);
11038 if (rc < 0) {
11039 pthread_mutex_unlock(&gCamLock);
11040 return rc;
11041 }
11042 }
11043
11044 switch(gCamCapability[cameraId]->position) {
11045 case CAM_POSITION_BACK:
11046 case CAM_POSITION_BACK_AUX:
11047 info->facing = CAMERA_FACING_BACK;
11048 break;
11049
11050 case CAM_POSITION_FRONT:
11051 case CAM_POSITION_FRONT_AUX:
11052 info->facing = CAMERA_FACING_FRONT;
11053 break;
11054
11055 default:
11056 LOGE("Unknown position type %d for camera id:%d",
11057 gCamCapability[cameraId]->position, cameraId);
11058 rc = -1;
11059 break;
11060 }
11061
11062
11063 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011064#ifndef USE_HAL_3_3
11065 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11066#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011067 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011068#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011069 info->static_camera_characteristics = gStaticMetadata[cameraId];
11070
11071 //For now assume both cameras can operate independently.
11072 info->conflicting_devices = NULL;
11073 info->conflicting_devices_length = 0;
11074
11075 //resource cost is 100 * MIN(1.0, m/M),
11076 //where m is throughput requirement with maximum stream configuration
11077 //and M is CPP maximum throughput.
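    //For example (hypothetical numbers): three 12MP processed streams at
    //30 fps need m = 3 * 12e6 * 30 = 1.08e9 pixels/s; with M = 1.2e9 pixels/s
    //the reported cost is 100 * MIN(1.0, 0.9) = 90.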
11078 float max_fps = 0.0;
11079 for (uint32_t i = 0;
11080 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11081 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11082 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11083 }
11084 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11085 gCamCapability[cameraId]->active_array_size.width *
11086 gCamCapability[cameraId]->active_array_size.height * max_fps /
11087 gCamCapability[cameraId]->max_pixel_bandwidth;
11088 info->resource_cost = 100 * MIN(1.0, ratio);
11089 LOGI("camera %d resource cost is %d", cameraId,
11090 info->resource_cost);
11091
11092 pthread_mutex_unlock(&gCamLock);
11093 return rc;
11094}
11095
11096/*===========================================================================
11097 * FUNCTION : translateCapabilityToMetadata
11098 *
11099 * DESCRIPTION: translate the capability into camera_metadata_t
11100 *
11101 * PARAMETERS : type of the request
11102 *
11103 *
11104 * RETURN : success: camera_metadata_t*
11105 * failure: NULL
11106 *
11107 *==========================================================================*/
11108camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11109{
11110 if (mDefaultMetadata[type] != NULL) {
11111 return mDefaultMetadata[type];
11112 }
11113 //first time we are handling this request
11114 //fill up the metadata structure using the wrapper class
11115 CameraMetadata settings;
11116 //translate from cam_capability_t to camera_metadata_tag_t
11117 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11118 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11119 int32_t defaultRequestID = 0;
11120 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11121
11122 /* OIS disable */
11123 char ois_prop[PROPERTY_VALUE_MAX];
11124 memset(ois_prop, 0, sizeof(ois_prop));
11125 property_get("persist.camera.ois.disable", ois_prop, "0");
11126 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11127
11128 /* Force video to use OIS */
11129 char videoOisProp[PROPERTY_VALUE_MAX];
11130 memset(videoOisProp, 0, sizeof(videoOisProp));
11131 property_get("persist.camera.ois.video", videoOisProp, "1");
11132 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011133
11134 // Hybrid AE enable/disable
11135 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11136 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11137 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011138 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011139
Thierry Strudel3d639192016-09-09 11:52:26 -070011140 uint8_t controlIntent = 0;
11141 uint8_t focusMode;
11142 uint8_t vsMode;
11143 uint8_t optStabMode;
11144 uint8_t cacMode;
11145 uint8_t edge_mode;
11146 uint8_t noise_red_mode;
11147 uint8_t tonemap_mode;
11148 bool highQualityModeEntryAvailable = FALSE;
11149 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011150 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011151 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11152 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011153 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011154 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011155 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011156
Thierry Strudel3d639192016-09-09 11:52:26 -070011157 switch (type) {
11158 case CAMERA3_TEMPLATE_PREVIEW:
11159 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11160 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11161 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11162 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11163 edge_mode = ANDROID_EDGE_MODE_FAST;
11164 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11165 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11166 break;
11167 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11168 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11169 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11170 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11171 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11172 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11173 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11174 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11175 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11176 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11177 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11178 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11179 highQualityModeEntryAvailable = TRUE;
11180 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11181 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11182 fastModeEntryAvailable = TRUE;
11183 }
11184 }
11185 if (highQualityModeEntryAvailable) {
11186 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11187 } else if (fastModeEntryAvailable) {
11188 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11189 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011190 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11191 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11192 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011193 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011194 break;
11195 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11196 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11197 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11198 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011199 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11200 edge_mode = ANDROID_EDGE_MODE_FAST;
11201 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11202 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11203 if (forceVideoOis)
11204 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11205 break;
11206 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11207 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11208 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11209 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011210 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11211 edge_mode = ANDROID_EDGE_MODE_FAST;
11212 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11213 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11214 if (forceVideoOis)
11215 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11216 break;
11217 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11218 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11219 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11220 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11221 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11222 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11223 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11224 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11225 break;
11226 case CAMERA3_TEMPLATE_MANUAL:
11227 edge_mode = ANDROID_EDGE_MODE_FAST;
11228 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11229 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11230 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11231 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11232 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11233 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11234 break;
11235 default:
11236 edge_mode = ANDROID_EDGE_MODE_FAST;
11237 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11238 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11239 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11240 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11241 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11242 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11243 break;
11244 }
    // Set CAC to OFF if the underlying device doesn't support it
11246 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11247 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11248 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011249 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11250 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11251 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11252 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11253 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11254 }
11255 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011256 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011257 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011258
11259 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11260 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11261 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11262 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11263 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11264 || ois_disable)
11265 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11266 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011267 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011268
11269 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11270 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11271
11272 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11273 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11274
11275 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11276 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11277
11278 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11279 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11280
11281 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11282 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11283
11284 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11285 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11286
11287 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11288 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11289
11290 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11291 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11292
11293 /*flash*/
11294 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11295 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11296
11297 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11298 settings.update(ANDROID_FLASH_FIRING_POWER,
11299 &flashFiringLevel, 1);
11300
11301 /* lens */
11302 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11303 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11304
11305 if (gCamCapability[mCameraId]->filter_densities_count) {
11306 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11307 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11308 gCamCapability[mCameraId]->filter_densities_count);
11309 }
11310
11311 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11312 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11313
Thierry Strudel3d639192016-09-09 11:52:26 -070011314 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11315 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11316
11317 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11318 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11319
11320 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11321 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11322
11323 /* face detection (default to OFF) */
11324 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11325 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11326
Thierry Strudel54dc9782017-02-15 12:12:10 -080011327 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11328 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011329
11330 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11331 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11332
11333 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11334 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11335
Thierry Strudel3d639192016-09-09 11:52:26 -070011336
11337 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11338 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11339
    /* Exposure time (default to the minimum supported exposure time) */
11341 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11342 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11343
11344 /* frame duration */
11345 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11346 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11347
11348 /* sensitivity */
11349 static const int32_t default_sensitivity = 100;
11350 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011351#ifndef USE_HAL_3_3
11352 static const int32_t default_isp_sensitivity =
11353 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11354 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11355#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011356
11357 /*edge mode*/
11358 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11359
11360 /*noise reduction mode*/
11361 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11362
11363 /*color correction mode*/
11364 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11365 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11366
11367 /*transform matrix mode*/
11368 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11369
11370 int32_t scaler_crop_region[4];
11371 scaler_crop_region[0] = 0;
11372 scaler_crop_region[1] = 0;
11373 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11374 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11375 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11376
11377 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11378 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11379
11380 /*focus distance*/
11381 float focus_distance = 0.0;
11382 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11383
11384 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011385 /* Restrict template max_fps to 30 */
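    /* For example (hypothetical fps table), with ranges {15-30, 30-30, 60-60}
     * the preview/still/ZSL templates pick 15-30 (the widest range whose max
     * does not exceed TEMPLATE_MAX_PREVIEW_FPS), the video templates pick
     * 30-30 (the highest fixed rate), and 60-60 is skipped by the cap. */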
Thierry Strudel3d639192016-09-09 11:52:26 -070011386 float max_range = 0.0;
11387 float max_fixed_fps = 0.0;
11388 int32_t fps_range[2] = {0, 0};
11389 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11390 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011391 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11392 TEMPLATE_MAX_PREVIEW_FPS) {
11393 continue;
11394 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011395 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11396 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11397 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11398 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11399 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11400 if (range > max_range) {
11401 fps_range[0] =
11402 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11403 fps_range[1] =
11404 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11405 max_range = range;
11406 }
11407 } else {
11408 if (range < 0.01 && max_fixed_fps <
11409 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11410 fps_range[0] =
11411 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11412 fps_range[1] =
11413 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11414 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11415 }
11416 }
11417 }
11418 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
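    /* Worked example (illustrative, assuming TEMPLATE_MAX_PREVIEW_FPS is the
     * 30 fps cap referred to above): with an fps table of {[15,30], [30,30],
     * [7.5,60]}, the [7.5,60] entry is skipped for every template because its
     * max exceeds the cap; PREVIEW/STILL_CAPTURE/ZSL templates then pick
     * [15,30] (the widest remaining range), while the remaining templates pick
     * [30,30] (the highest fixed rate). */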
11419
11420 /*precapture trigger*/
11421 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11422 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11423
11424 /*af trigger*/
11425 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11426 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11427
11428 /* ae & af regions */
11429 int32_t active_region[] = {
11430 gCamCapability[mCameraId]->active_array_size.left,
11431 gCamCapability[mCameraId]->active_array_size.top,
11432 gCamCapability[mCameraId]->active_array_size.left +
11433 gCamCapability[mCameraId]->active_array_size.width,
11434 gCamCapability[mCameraId]->active_array_size.top +
11435 gCamCapability[mCameraId]->active_array_size.height,
11436 0};
11437 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11438 sizeof(active_region) / sizeof(active_region[0]));
11439 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11440 sizeof(active_region) / sizeof(active_region[0]));
11441
11442 /* black level lock */
11443 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11444 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11445
Thierry Strudel3d639192016-09-09 11:52:26 -070011446 //special defaults for manual template
11447 if (type == CAMERA3_TEMPLATE_MANUAL) {
11448 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11449 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11450
11451 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11452 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11453
11454 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11455 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11456
11457 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11458 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11459
11460 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11461 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11462
11463 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11464 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11465 }
11466
11467
11468 /* TNR
11469     * This is where we decide, per template, whether TNR is enabled.
11470     * TNR is turned on if either the preview or the video stream requires it.
11471     * This is not to be confused with per-stream linking; that decision is
11472     * still made per session and is handled as part of stream configuration.
11473 */
11474 uint8_t tnr_enable = 0;
11475
11476 if (m_bTnrPreview || m_bTnrVideo) {
11477
11478 switch (type) {
11479 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11480 tnr_enable = 1;
11481 break;
11482
11483 default:
11484 tnr_enable = 0;
11485 break;
11486 }
11487
11488 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11489 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11490 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11491
11492 LOGD("TNR:%d with process plate %d for template:%d",
11493 tnr_enable, tnr_process_type, type);
11494 }
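    /* Example (illustrative, derived from the switch above): when m_bTnrVideo
     * (or m_bTnrPreview) is set, only the VIDEO_RECORD template ends up with
     * QCAMERA3_TEMPORAL_DENOISE_ENABLE = 1; every other template gets 0, and
     * for templates where TNR is on, CDS is forced to CAM_CDS_MODE_OFF further
     * below. */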
11495
11496 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011497 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011498 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11499
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011500 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011501 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11502
Shuzhen Wang920ea402017-05-03 08:49:39 -070011503 uint8_t related_camera_id = mCameraId;
11504 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011505
11506 /* CDS default */
11507 char prop[PROPERTY_VALUE_MAX];
11508 memset(prop, 0, sizeof(prop));
11509 property_get("persist.camera.CDS", prop, "Auto");
11510 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11511 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11512 if (CAM_CDS_MODE_MAX == cds_mode) {
11513 cds_mode = CAM_CDS_MODE_AUTO;
11514 }
11515
11516 /* Disabling CDS in templates which have TNR enabled*/
11517 if (tnr_enable)
11518 cds_mode = CAM_CDS_MODE_OFF;
11519
11520 int32_t mode = cds_mode;
11521 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -070011522
Thierry Strudel269c81a2016-10-12 12:13:59 -070011523 /* Manual Convergence AEC Speed is disabled by default*/
11524 float default_aec_speed = 0;
11525 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11526
11527 /* Manual Convergence AWB Speed is disabled by default*/
11528 float default_awb_speed = 0;
11529 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11530
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011531 // Set instant AEC to normal convergence by default
11532 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11533 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11534
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011535 if (gExposeEnableZslKey) {
11536 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011537 int32_t postview = 0;
11538 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011539 int32_t continuousZslCapture = 0;
11540 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011541 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11542 // hybrid ae is enabled for 3rd party app HDR+.
11543 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11544 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11545 hybrid_ae = 1;
11546 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011547 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011548 /* hybrid ae */
11549 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011550
Thierry Strudel3d639192016-09-09 11:52:26 -070011551 mDefaultMetadata[type] = settings.release();
11552
11553 return mDefaultMetadata[type];
11554}
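/* Usage sketch (illustrative, not part of the HAL build): the framework fetches
 * one of these templates per use case through the HAL3 device ops, e.g.
 *
 *     const camera_metadata_t *tmpl =
 *             device->ops->construct_default_request_settings(
 *                     device, CAMERA3_TEMPLATE_PREVIEW);
 *
 * The returned buffer stays owned by the HAL (mDefaultMetadata[type]) and
 * remains valid until the device is closed, so the caller must not free it.
 * The camera3_device_ops entry point comes from camera3.h; the surrounding
 * wiring is an assumption of this sketch.
 */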
11555
11556/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011557 * FUNCTION : getExpectedFrameDuration
11558 *
11559 * DESCRIPTION: Extract the maximum frame duration from either exposure or frame
11560 * duration
11561 *
11562 * PARAMETERS :
11563 * @request : request settings
11564 * @frameDuration : The maximum frame duration in nanoseconds
11565 *
11566 * RETURN : None
11567 *==========================================================================*/
11568void QCamera3HardwareInterface::getExpectedFrameDuration(
11569 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11570 if (nullptr == frameDuration) {
11571 return;
11572 }
11573
11574 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11575 find_camera_metadata_ro_entry(request,
11576 ANDROID_SENSOR_EXPOSURE_TIME,
11577 &e);
11578 if (e.count > 0) {
11579 *frameDuration = e.data.i64[0];
11580 }
11581 find_camera_metadata_ro_entry(request,
11582 ANDROID_SENSOR_FRAME_DURATION,
11583 &e);
11584 if (e.count > 0) {
11585 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11586 }
11587}
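/* Worked example (illustrative): with ANDROID_SENSOR_EXPOSURE_TIME = 50 ms
 * (50000000 ns) and ANDROID_SENSOR_FRAME_DURATION = 33.3 ms (33333333 ns) in
 * the request, *frameDuration becomes max(33333333, 50000000) = 50000000 ns,
 * since a frame can never be shorter than its exposure. */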
11588
11589/*===========================================================================
11590 * FUNCTION : calculateMaxExpectedDuration
11591 *
11592 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11593 * current camera settings.
11594 *
11595 * PARAMETERS :
11596 * @request : request settings
11597 *
11598 * RETURN : Expected frame duration in nanoseconds.
11599 *==========================================================================*/
11600nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11601 const camera_metadata_t *request) {
11602 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11603 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11604 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11605 if (e.count == 0) {
11606 return maxExpectedDuration;
11607 }
11608
11609 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11610 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11611 }
11612
11613 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11614 return maxExpectedDuration;
11615 }
11616
11617 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11618 if (e.count == 0) {
11619 return maxExpectedDuration;
11620 }
11621
11622 switch (e.data.u8[0]) {
11623 case ANDROID_CONTROL_AE_MODE_OFF:
11624 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11625 break;
11626 default:
11627 find_camera_metadata_ro_entry(request,
11628 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11629 &e);
11630 if (e.count > 1) {
11631                 maxExpectedDuration = 1e9 / e.data.i32[0];
11632 }
11633 break;
11634 }
11635
11636 return maxExpectedDuration;
11637}
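/* Example (illustrative): with ANDROID_CONTROL_MODE = AUTO,
 * ANDROID_CONTROL_AE_MODE = ON and an AE target fps range of [15, 30], the
 * worst-case duration is 1e9 / 15 ~= 66.7 ms (driven by the minimum fps).
 * With ANDROID_CONTROL_MODE = OFF or AE_MODE = OFF the duration is taken from
 * the manual exposure/frame-duration tags via getExpectedFrameDuration(), and
 * kDefaultExpectedDuration is returned when neither path applies. */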
11638
11639/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011640 * FUNCTION : setFrameParameters
11641 *
11642 * DESCRIPTION: set parameters per frame as requested in the metadata from
11643 * framework
11644 *
11645 * PARAMETERS :
11646 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011647 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011648 * @blob_request: Whether this request is a blob request or not
11649 *
11650 * RETURN : success: NO_ERROR
11651 * failure:
11652 *==========================================================================*/
11653int QCamera3HardwareInterface::setFrameParameters(
11654 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011655 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011656 int blob_request,
11657 uint32_t snapshotStreamId)
11658{
11659 /*translate from camera_metadata_t type to parm_type_t*/
11660 int rc = 0;
11661 int32_t hal_version = CAM_HAL_V3;
11662
11663 clear_metadata_buffer(mParameters);
11664 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11665 LOGE("Failed to set hal version in the parameters");
11666 return BAD_VALUE;
11667 }
11668
11669 /*we need to update the frame number in the parameters*/
11670 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11671 request->frame_number)) {
11672 LOGE("Failed to set the frame number in the parameters");
11673 return BAD_VALUE;
11674 }
11675
11676 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011678 LOGE("Failed to set stream type mask in the parameters");
11679 return BAD_VALUE;
11680 }
11681
11682 if (mUpdateDebugLevel) {
11683 uint32_t dummyDebugLevel = 0;
11684         /* The value of dummyDebugLevel is irrelevant; setting
11685          * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL triggers a re-read of the debug property */
11686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11687 dummyDebugLevel)) {
11688 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11689 return BAD_VALUE;
11690 }
11691 mUpdateDebugLevel = false;
11692 }
11693
11694 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011695 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011696 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11697 if (blob_request)
11698 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11699 }
11700
11701 return rc;
11702}
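/* Call sketch (illustrative, mirroring how the capture request path uses this
 * function; treat the exact field names below as assumptions of this sketch):
 *
 *     cam_stream_ID_t streamsArray;
 *     memset(&streamsArray, 0, sizeof(cam_stream_ID_t));
 *     streamsArray.stream_request[streamsArray.num_streams++].streamID =
 *             channel->getStreamID(channel->getStreamTypeMask());
 *     rc = setFrameParameters(request, streamsArray, blob_request,
 *             snapshotStreamId);
 *
 * On success, mParameters holds the per-frame settings for this frame number
 * and can be submitted to the backend.
 */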
11703
11704/*===========================================================================
11705 * FUNCTION : setReprocParameters
11706 *
11707 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11708 * return it.
11709 *
11710 * PARAMETERS :
11711 * @request : request that needs to be serviced
11712 *
11713 * RETURN : success: NO_ERROR
11714 * failure:
11715 *==========================================================================*/
11716int32_t QCamera3HardwareInterface::setReprocParameters(
11717 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11718 uint32_t snapshotStreamId)
11719{
11720 /*translate from camera_metadata_t type to parm_type_t*/
11721 int rc = 0;
11722
11723 if (NULL == request->settings){
11724 LOGE("Reprocess settings cannot be NULL");
11725 return BAD_VALUE;
11726 }
11727
11728 if (NULL == reprocParam) {
11729 LOGE("Invalid reprocessing metadata buffer");
11730 return BAD_VALUE;
11731 }
11732 clear_metadata_buffer(reprocParam);
11733
11734 /*we need to update the frame number in the parameters*/
11735 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11736 request->frame_number)) {
11737 LOGE("Failed to set the frame number in the parameters");
11738 return BAD_VALUE;
11739 }
11740
11741 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11742 if (rc < 0) {
11743 LOGE("Failed to translate reproc request");
11744 return rc;
11745 }
11746
11747 CameraMetadata frame_settings;
11748 frame_settings = request->settings;
11749 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11750 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11751 int32_t *crop_count =
11752 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11753 int32_t *crop_data =
11754 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11755 int32_t *roi_map =
11756 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11757 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11758 cam_crop_data_t crop_meta;
11759 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11760 crop_meta.num_of_streams = 1;
11761 crop_meta.crop_info[0].crop.left = crop_data[0];
11762 crop_meta.crop_info[0].crop.top = crop_data[1];
11763 crop_meta.crop_info[0].crop.width = crop_data[2];
11764 crop_meta.crop_info[0].crop.height = crop_data[3];
11765
11766 crop_meta.crop_info[0].roi_map.left =
11767 roi_map[0];
11768 crop_meta.crop_info[0].roi_map.top =
11769 roi_map[1];
11770 crop_meta.crop_info[0].roi_map.width =
11771 roi_map[2];
11772 crop_meta.crop_info[0].roi_map.height =
11773 roi_map[3];
11774
11775 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11776 rc = BAD_VALUE;
11777 }
11778 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11779 request->input_buffer->stream,
11780 crop_meta.crop_info[0].crop.left,
11781 crop_meta.crop_info[0].crop.top,
11782 crop_meta.crop_info[0].crop.width,
11783 crop_meta.crop_info[0].crop.height);
11784 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11785 request->input_buffer->stream,
11786 crop_meta.crop_info[0].roi_map.left,
11787 crop_meta.crop_info[0].roi_map.top,
11788 crop_meta.crop_info[0].roi_map.width,
11789 crop_meta.crop_info[0].roi_map.height);
11790 } else {
11791 LOGE("Invalid reprocess crop count %d!", *crop_count);
11792 }
11793 } else {
11794 LOGE("No crop data from matching output stream");
11795 }
11796
11797 /* These settings are not needed for regular requests so handle them specially for
11798 reprocess requests; information needed for EXIF tags */
11799 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11800 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11801 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11802 if (NAME_NOT_FOUND != val) {
11803 uint32_t flashMode = (uint32_t)val;
11804 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11805 rc = BAD_VALUE;
11806 }
11807 } else {
11808 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11809 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11810 }
11811 } else {
11812 LOGH("No flash mode in reprocess settings");
11813 }
11814
11815 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11816 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11817 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11818 rc = BAD_VALUE;
11819 }
11820 } else {
11821 LOGH("No flash state in reprocess settings");
11822 }
11823
11824 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11825 uint8_t *reprocessFlags =
11826 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11827 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11828 *reprocessFlags)) {
11829 rc = BAD_VALUE;
11830 }
11831 }
11832
Thierry Strudel54dc9782017-02-15 12:12:10 -080011833 // Add exif debug data to internal metadata
11834 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11835 mm_jpeg_debug_exif_params_t *debug_params =
11836 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11837 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11838 // AE
11839 if (debug_params->ae_debug_params_valid == TRUE) {
11840 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11841 debug_params->ae_debug_params);
11842 }
11843 // AWB
11844 if (debug_params->awb_debug_params_valid == TRUE) {
11845 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11846 debug_params->awb_debug_params);
11847 }
11848 // AF
11849 if (debug_params->af_debug_params_valid == TRUE) {
11850 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11851 debug_params->af_debug_params);
11852 }
11853 // ASD
11854 if (debug_params->asd_debug_params_valid == TRUE) {
11855 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11856 debug_params->asd_debug_params);
11857 }
11858 // Stats
11859 if (debug_params->stats_debug_params_valid == TRUE) {
11860 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11861 debug_params->stats_debug_params);
11862 }
11863 // BE Stats
11864 if (debug_params->bestats_debug_params_valid == TRUE) {
11865 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11866 debug_params->bestats_debug_params);
11867 }
11868 // BHIST
11869 if (debug_params->bhist_debug_params_valid == TRUE) {
11870 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11871 debug_params->bhist_debug_params);
11872 }
11873 // 3A Tuning
11874 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11875 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11876 debug_params->q3a_tuning_debug_params);
11877 }
11878 }
11879
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011880 // Add metadata which reprocess needs
11881 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11882 cam_reprocess_info_t *repro_info =
11883 (cam_reprocess_info_t *)frame_settings.find
11884 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011885 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011886 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011887 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011888 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011889 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011890 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011891 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011892 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011893 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011894 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011895 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011896 repro_info->pipeline_flip);
11897 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11898 repro_info->af_roi);
11899 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11900 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011901         /* If ANDROID_JPEG_ORIENTATION is present in the frame settings,
11902            the CAM_INTF_PARM_ROTATION metadata has already been added in
11903            translateToHalMetadata. The HAL needs to keep this new rotation
11904 metadata. Otherwise, the old rotation info saved in the vendor tag
11905 would be used */
11906 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
11907 CAM_INTF_PARM_ROTATION, reprocParam) {
11908 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
11909 } else {
11910 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011911 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011912 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011913 }
11914
11915     /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
11916        to request cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
11917        roi.width and roi.height are the final JPEG size.
11918        For now, the HAL only checks this for reprocess requests */
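    /* Example (illustrative values): to crop a 4000x3000 snapshot to its
     * centered 2000x1500 region and encode a 1920x1080 JPEG, the app would set
     *     QCAMERA3_JPEG_ENCODE_CROP_ENABLE = 1
     *     QCAMERA3_JPEG_ENCODE_CROP_RECT   = {1000, 750, 2000, 1500}   // l,t,w,h
     *     QCAMERA3_JPEG_ENCODE_CROP_ROI    = {0, 0, 1920, 1080}
     * where roi[2]/roi[3] give the final JPEG dimensions used for HW scaling. */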
11919 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
11920 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
11921 uint8_t *enable =
11922 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
11923 if (*enable == TRUE) {
11924 int32_t *crop_data =
11925 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
11926 cam_stream_crop_info_t crop_meta;
11927 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
11928 crop_meta.stream_id = 0;
11929 crop_meta.crop.left = crop_data[0];
11930 crop_meta.crop.top = crop_data[1];
11931 crop_meta.crop.width = crop_data[2];
11932 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011933 // The JPEG crop roi should match cpp output size
11934 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
11935 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
11936 crop_meta.roi_map.left = 0;
11937 crop_meta.roi_map.top = 0;
11938 crop_meta.roi_map.width = cpp_crop->crop.width;
11939 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070011940 }
11941 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
11942 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011943 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011944 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011945 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
11946 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070011947 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011948 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
11949
11950 // Add JPEG scale information
11951 cam_dimension_t scale_dim;
11952 memset(&scale_dim, 0, sizeof(cam_dimension_t));
11953 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
11954 int32_t *roi =
11955 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
11956 scale_dim.width = roi[2];
11957 scale_dim.height = roi[3];
11958 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
11959 scale_dim);
11960 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
11961 scale_dim.width, scale_dim.height, mCameraId);
11962 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011963 }
11964 }
11965
11966 return rc;
11967}
11968
11969/*===========================================================================
11970 * FUNCTION : saveRequestSettings
11971 *
11972 * DESCRIPTION: Add any settings that might have changed to the request settings
11973 * and save the settings to be applied on the frame
11974 *
11975 * PARAMETERS :
11976 * @jpegMetadata : the extracted and/or modified jpeg metadata
11977 * @request : request with initial settings
11978 *
11979 * RETURN :
11980 * camera_metadata_t* : pointer to the saved request settings
11981 *==========================================================================*/
11982camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
11983 const CameraMetadata &jpegMetadata,
11984 camera3_capture_request_t *request)
11985{
11986 camera_metadata_t *resultMetadata;
11987 CameraMetadata camMetadata;
11988 camMetadata = request->settings;
11989
11990 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
11991 int32_t thumbnail_size[2];
11992 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
11993 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
11994 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
11995 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
11996 }
11997
11998 if (request->input_buffer != NULL) {
11999 uint8_t reprocessFlags = 1;
12000 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12001 (uint8_t*)&reprocessFlags,
12002 sizeof(reprocessFlags));
12003 }
12004
12005 resultMetadata = camMetadata.release();
12006 return resultMetadata;
12007}
12008
12009/*===========================================================================
12010 * FUNCTION : setHalFpsRange
12011 *
12012 * DESCRIPTION: set FPS range parameter
12013 *
12014 *
12015 * PARAMETERS :
12016 * @settings : Metadata from framework
12017 * @hal_metadata: Metadata buffer
12018 *
12019 *
12020 * RETURN : success: NO_ERROR
12021 * failure:
12022 *==========================================================================*/
12023int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12024 metadata_buffer_t *hal_metadata)
12025{
12026 int32_t rc = NO_ERROR;
12027 cam_fps_range_t fps_range;
12028 fps_range.min_fps = (float)
12029 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12030 fps_range.max_fps = (float)
12031 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12032 fps_range.video_min_fps = fps_range.min_fps;
12033 fps_range.video_max_fps = fps_range.max_fps;
12034
12035 LOGD("aeTargetFpsRange fps: [%f %f]",
12036 fps_range.min_fps, fps_range.max_fps);
12037 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12038 * follows:
12039 * ---------------------------------------------------------------|
12040 * Video stream is absent in configure_streams |
12041      * (Camcorder preview before the first video record) |
12042 * ---------------------------------------------------------------|
12043 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12044 * | | | vid_min/max_fps|
12045 * ---------------------------------------------------------------|
12046 * NO | [ 30, 240] | 240 | [240, 240] |
12047 * |-------------|-------------|----------------|
12048 * | [240, 240] | 240 | [240, 240] |
12049 * ---------------------------------------------------------------|
12050 * Video stream is present in configure_streams |
12051 * ---------------------------------------------------------------|
12052 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12053 * | | | vid_min/max_fps|
12054 * ---------------------------------------------------------------|
12055 * NO | [ 30, 240] | 240 | [240, 240] |
12056 * (camcorder prev |-------------|-------------|----------------|
12057 * after video rec | [240, 240] | 240 | [240, 240] |
12058 * is stopped) | | | |
12059 * ---------------------------------------------------------------|
12060 * YES | [ 30, 240] | 240 | [240, 240] |
12061 * |-------------|-------------|----------------|
12062 * | [240, 240] | 240 | [240, 240] |
12063 * ---------------------------------------------------------------|
12064 * When Video stream is absent in configure_streams,
12065 * preview fps = sensor_fps / batchsize
12066 * Eg: for 240fps at batchSize 4, preview = 60fps
12067 * for 120fps at batchSize 4, preview = 30fps
12068 *
12069 * When video stream is present in configure_streams, preview fps is as per
12070 * the ratio of preview buffers to video buffers requested in process
12071 * capture request
12072 */
12073 mBatchSize = 0;
12074 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12075 fps_range.min_fps = fps_range.video_max_fps;
12076 fps_range.video_min_fps = fps_range.video_max_fps;
12077 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12078 fps_range.max_fps);
12079 if (NAME_NOT_FOUND != val) {
12080 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12081 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12082 return BAD_VALUE;
12083 }
12084
12085 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12086 /* If batchmode is currently in progress and the fps changes,
12087 * set the flag to restart the sensor */
12088 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12089 (mHFRVideoFps != fps_range.max_fps)) {
12090 mNeedSensorRestart = true;
12091 }
12092 mHFRVideoFps = fps_range.max_fps;
12093 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12094 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12095 mBatchSize = MAX_HFR_BATCH_SIZE;
12096 }
12097 }
12098 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12099
12100 }
12101 } else {
12102 /* HFR mode is session param in backend/ISP. This should be reset when
12103 * in non-HFR mode */
12104 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12105 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12106 return BAD_VALUE;
12107 }
12108 }
12109 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12110 return BAD_VALUE;
12111 }
12112 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12113 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12114 return rc;
12115}
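/* Worked example (illustrative, assuming PREVIEW_FPS_FOR_HFR is 30 and
 * MAX_HFR_BATCH_SIZE permits it): in CONSTRAINED_HIGH_SPEED mode with
 * aeTargetFpsRange = [30, 120], the sensor/video range is forced to
 * [120, 120] and mBatchSize = 120 / 30 = 4, i.e. four video frames are
 * batched per preview frame; in normal mode HFR is explicitly reset to
 * CAM_HFR_MODE_OFF and the requested fps range is passed through unchanged. */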
12116
12117/*===========================================================================
12118 * FUNCTION : translateToHalMetadata
12119 *
12120 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12121 *
12122 *
12123 * PARAMETERS :
12124 * @request : request sent from framework
12125 *
12126 *
12127 * RETURN : success: NO_ERROR
12128 * failure:
12129 *==========================================================================*/
12130int QCamera3HardwareInterface::translateToHalMetadata
12131 (const camera3_capture_request_t *request,
12132 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012133 uint32_t snapshotStreamId) {
12134 if (request == nullptr || hal_metadata == nullptr) {
12135 return BAD_VALUE;
12136 }
12137
12138 int64_t minFrameDuration = getMinFrameDuration(request);
12139
12140 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12141 minFrameDuration);
12142}
12143
12144int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12145 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12146 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12147
Thierry Strudel3d639192016-09-09 11:52:26 -070012148 int rc = 0;
12149 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012150 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012151
12152 /* Do not change the order of the following list unless you know what you are
12153 * doing.
12154 * The order is laid out in such a way that parameters in the front of the table
12155 * may be used to override the parameters later in the table. Examples are:
12156 * 1. META_MODE should precede AEC/AWB/AF MODE
12157      * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12158 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12159 * 4. Any mode should precede it's corresponding settings
12160      * 4. Any mode should precede its corresponding settings
12161 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12162 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12163 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12164 rc = BAD_VALUE;
12165 }
12166 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12167 if (rc != NO_ERROR) {
12168 LOGE("extractSceneMode failed");
12169 }
12170 }
12171
12172 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12173 uint8_t fwk_aeMode =
12174 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12175 uint8_t aeMode;
12176 int32_t redeye;
12177
12178 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12179 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012180 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12181 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012182 } else {
12183 aeMode = CAM_AE_MODE_ON;
12184 }
12185 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12186 redeye = 1;
12187 } else {
12188 redeye = 0;
12189 }
12190
12191 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12192 fwk_aeMode);
12193 if (NAME_NOT_FOUND != val) {
12194 int32_t flashMode = (int32_t)val;
12195 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12196 }
12197
12198 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12200 rc = BAD_VALUE;
12201 }
12202 }
12203
12204 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12205 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12206 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12207 fwk_whiteLevel);
12208 if (NAME_NOT_FOUND != val) {
12209 uint8_t whiteLevel = (uint8_t)val;
12210 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12211 rc = BAD_VALUE;
12212 }
12213 }
12214 }
12215
12216 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12217 uint8_t fwk_cacMode =
12218 frame_settings.find(
12219 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12220 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12221 fwk_cacMode);
12222 if (NAME_NOT_FOUND != val) {
12223 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12224 bool entryAvailable = FALSE;
12225 // Check whether Frameworks set CAC mode is supported in device or not
12226 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12227 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12228 entryAvailable = TRUE;
12229 break;
12230 }
12231 }
12232 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12233             // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.:
12234             // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
12235             // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12236 if (entryAvailable == FALSE) {
12237 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12238 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12239 } else {
12240 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12241                     // HIGH_QUALITY is not supported, so fall back to FAST; the spec says the
12242                     // underlying device implementation may be the same for both modes.
12243 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12244 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12245                     // FAST is not supported, so neither HIGH nor FAST can be set; choose OFF
12246                     // to avoid the fps drop a high quality mode would cause
12247 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12248 } else {
12249 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12250 }
12251 }
12252 }
12253 LOGD("Final cacMode is %d", cacMode);
12254 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12255 rc = BAD_VALUE;
12256 }
12257 } else {
12258 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12259 }
12260 }
12261
Jason Lee84ae9972017-02-24 13:24:24 -080012262 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012263 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012264 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012265 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012266 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12267 fwk_focusMode);
12268 if (NAME_NOT_FOUND != val) {
12269 uint8_t focusMode = (uint8_t)val;
12270 LOGD("set focus mode %d", focusMode);
12271 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12272 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12273 rc = BAD_VALUE;
12274 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012275 }
12276 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012277 } else {
12278 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12279 LOGE("Focus forced to infinity %d", focusMode);
12280 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12281 rc = BAD_VALUE;
12282 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012283 }
12284
Jason Lee84ae9972017-02-24 13:24:24 -080012285 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12286 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012287 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12288 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12289 focalDistance)) {
12290 rc = BAD_VALUE;
12291 }
12292 }
12293
12294 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12295 uint8_t fwk_antibandingMode =
12296 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12297 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12298 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12299 if (NAME_NOT_FOUND != val) {
12300 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012301 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12302 if (m60HzZone) {
12303 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12304 } else {
12305 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12306 }
12307 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012308 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12309 hal_antibandingMode)) {
12310 rc = BAD_VALUE;
12311 }
12312 }
12313 }
12314
12315 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12316 int32_t expCompensation = frame_settings.find(
12317 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12318 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12319 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12320 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12321 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012322 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012323 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12324 expCompensation)) {
12325 rc = BAD_VALUE;
12326 }
12327 }
12328
12329 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12330 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12331 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12332 rc = BAD_VALUE;
12333 }
12334 }
12335 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12336 rc = setHalFpsRange(frame_settings, hal_metadata);
12337 if (rc != NO_ERROR) {
12338 LOGE("setHalFpsRange failed");
12339 }
12340 }
12341
12342 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12343 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12344 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12345 rc = BAD_VALUE;
12346 }
12347 }
12348
12349 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12350 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12351 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12352 fwk_effectMode);
12353 if (NAME_NOT_FOUND != val) {
12354 uint8_t effectMode = (uint8_t)val;
12355 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12356 rc = BAD_VALUE;
12357 }
12358 }
12359 }
12360
12361 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12362 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12363 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12364 colorCorrectMode)) {
12365 rc = BAD_VALUE;
12366 }
12367 }
12368
12369 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12370 cam_color_correct_gains_t colorCorrectGains;
12371 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12372 colorCorrectGains.gains[i] =
12373 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12374 }
12375 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12376 colorCorrectGains)) {
12377 rc = BAD_VALUE;
12378 }
12379 }
12380
12381 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12382 cam_color_correct_matrix_t colorCorrectTransform;
12383 cam_rational_type_t transform_elem;
12384 size_t num = 0;
12385 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12386 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12387 transform_elem.numerator =
12388 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12389 transform_elem.denominator =
12390 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12391 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12392 num++;
12393 }
12394 }
12395 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12396 colorCorrectTransform)) {
12397 rc = BAD_VALUE;
12398 }
12399 }
12400
12401 cam_trigger_t aecTrigger;
12402 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12403 aecTrigger.trigger_id = -1;
12404 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12405 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12406 aecTrigger.trigger =
12407 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12408 aecTrigger.trigger_id =
12409 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12410 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12411 aecTrigger)) {
12412 rc = BAD_VALUE;
12413 }
12414 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12415 aecTrigger.trigger, aecTrigger.trigger_id);
12416 }
12417
12418 /*af_trigger must come with a trigger id*/
12419 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12420 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12421 cam_trigger_t af_trigger;
12422 af_trigger.trigger =
12423 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12424 af_trigger.trigger_id =
12425 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12426 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12427 rc = BAD_VALUE;
12428 }
12429 LOGD("AfTrigger: %d AfTriggerID: %d",
12430 af_trigger.trigger, af_trigger.trigger_id);
12431 }
12432
12433 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12434 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12435 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12436 rc = BAD_VALUE;
12437 }
12438 }
12439 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12440 cam_edge_application_t edge_application;
12441 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012442
Thierry Strudel3d639192016-09-09 11:52:26 -070012443 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12444 edge_application.sharpness = 0;
12445 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012446 edge_application.sharpness =
12447 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12448 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12449 int32_t sharpness =
12450 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12451 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12452 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12453 LOGD("Setting edge mode sharpness %d", sharpness);
12454 edge_application.sharpness = sharpness;
12455 }
12456 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012457 }
12458 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12459 rc = BAD_VALUE;
12460 }
12461 }
12462
12463 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12464 int32_t respectFlashMode = 1;
12465 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12466 uint8_t fwk_aeMode =
12467 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012468 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12469 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12470 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012471 respectFlashMode = 0;
12472 LOGH("AE Mode controls flash, ignore android.flash.mode");
12473 }
12474 }
12475 if (respectFlashMode) {
12476 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12477 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12478 LOGH("flash mode after mapping %d", val);
12479 // To check: CAM_INTF_META_FLASH_MODE usage
12480 if (NAME_NOT_FOUND != val) {
12481 uint8_t flashMode = (uint8_t)val;
12482 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12483 rc = BAD_VALUE;
12484 }
12485 }
12486 }
12487 }
12488
12489 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12490 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12491 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12492 rc = BAD_VALUE;
12493 }
12494 }
12495
12496 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12497 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12498 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12499 flashFiringTime)) {
12500 rc = BAD_VALUE;
12501 }
12502 }
12503
12504 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12505 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12506 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12507 hotPixelMode)) {
12508 rc = BAD_VALUE;
12509 }
12510 }
12511
12512 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12513 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12514 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12515 lensAperture)) {
12516 rc = BAD_VALUE;
12517 }
12518 }
12519
12520 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12521 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12522 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12523 filterDensity)) {
12524 rc = BAD_VALUE;
12525 }
12526 }
12527
12528 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12529 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12530 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12531 focalLength)) {
12532 rc = BAD_VALUE;
12533 }
12534 }
12535
12536 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12537 uint8_t optStabMode =
12538 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12539 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12540 optStabMode)) {
12541 rc = BAD_VALUE;
12542 }
12543 }
12544
12545 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12546 uint8_t videoStabMode =
12547 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12548 LOGD("videoStabMode from APP = %d", videoStabMode);
12549 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_VIDEO_STAB_MODE,
12550 videoStabMode)) {
12551 rc = BAD_VALUE;
12552 }
12553 }
12554
12555
12556 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12557 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12558 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12559 noiseRedMode)) {
12560 rc = BAD_VALUE;
12561 }
12562 }
12563
12564 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12565 float reprocessEffectiveExposureFactor =
12566 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12567 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12568 reprocessEffectiveExposureFactor)) {
12569 rc = BAD_VALUE;
12570 }
12571 }
12572
12573 cam_crop_region_t scalerCropRegion;
12574 bool scalerCropSet = false;
12575 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12576 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12577 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12578 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12579 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12580
12581 // Map coordinate system from active array to sensor output.
12582 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12583 scalerCropRegion.width, scalerCropRegion.height);
12584
12585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12586 scalerCropRegion)) {
12587 rc = BAD_VALUE;
12588 }
12589 scalerCropSet = true;
12590 }
12591
12592 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12593 int64_t sensorExpTime =
12594 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12595 LOGD("setting sensorExpTime %lld", sensorExpTime);
12596 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12597 sensorExpTime)) {
12598 rc = BAD_VALUE;
12599 }
12600 }
12601
12602 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12603 int64_t sensorFrameDuration =
12604 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012605 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12606 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12607 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12608 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12609 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12610 sensorFrameDuration)) {
12611 rc = BAD_VALUE;
12612 }
12613 }
12614
12615 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12616 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12617 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12618 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12619 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12620 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12621 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12622 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12623 sensorSensitivity)) {
12624 rc = BAD_VALUE;
12625 }
12626 }
12627
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012628#ifndef USE_HAL_3_3
12629 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12630 int32_t ispSensitivity =
12631 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12632 if (ispSensitivity <
12633 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12634 ispSensitivity =
12635 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12636 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12637 }
12638 if (ispSensitivity >
12639 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12640 ispSensitivity =
12641 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12642 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12643 }
12644 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12645 ispSensitivity)) {
12646 rc = BAD_VALUE;
12647 }
12648 }
12649#endif
12650
Thierry Strudel3d639192016-09-09 11:52:26 -070012651 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12652 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12653 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12654 rc = BAD_VALUE;
12655 }
12656 }
12657
12658 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12659 uint8_t fwk_facedetectMode =
12660 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12661
12662 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12663 fwk_facedetectMode);
12664
12665 if (NAME_NOT_FOUND != val) {
12666 uint8_t facedetectMode = (uint8_t)val;
12667 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12668 facedetectMode)) {
12669 rc = BAD_VALUE;
12670 }
12671 }
12672 }
12673
Thierry Strudel54dc9782017-02-15 12:12:10 -080012674 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012675 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012676 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012677 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12678 histogramMode)) {
12679 rc = BAD_VALUE;
12680 }
12681 }
12682
12683 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12684 uint8_t sharpnessMapMode =
12685 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12686 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12687 sharpnessMapMode)) {
12688 rc = BAD_VALUE;
12689 }
12690 }
12691
12692 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12693 uint8_t tonemapMode =
12694 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12695 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12696 rc = BAD_VALUE;
12697 }
12698 }
12699 /* Tonemap curve channels ch0 = G, ch 1 = B, ch 2 = R */
12700 /*All tonemap channels will have the same number of points*/
12701 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12702 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12703 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12704 cam_rgb_tonemap_curves tonemapCurves;
12705 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12706 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12707 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12708 tonemapCurves.tonemap_points_cnt,
12709 CAM_MAX_TONEMAP_CURVE_SIZE);
12710 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12711 }
12712
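        // Note: each ANDROID_TONEMAP_CURVE_* entry is a flat float array of
        // (Pin, Pout) control points, which is why the point count above is
        // count/2 and the loops below copy two floats per point per channel.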
12713 /* ch0 = G*/
12714 size_t point = 0;
12715 cam_tonemap_curve_t tonemapCurveGreen;
12716 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12717 for (size_t j = 0; j < 2; j++) {
12718 tonemapCurveGreen.tonemap_points[i][j] =
12719 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12720 point++;
12721 }
12722 }
12723 tonemapCurves.curves[0] = tonemapCurveGreen;
12724
12725 /* ch 1 = B */
12726 point = 0;
12727 cam_tonemap_curve_t tonemapCurveBlue;
12728 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12729 for (size_t j = 0; j < 2; j++) {
12730 tonemapCurveBlue.tonemap_points[i][j] =
12731 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12732 point++;
12733 }
12734 }
12735 tonemapCurves.curves[1] = tonemapCurveBlue;
12736
12737 /* ch 2 = R */
12738 point = 0;
12739 cam_tonemap_curve_t tonemapCurveRed;
12740 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12741 for (size_t j = 0; j < 2; j++) {
12742 tonemapCurveRed.tonemap_points[i][j] =
12743 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12744 point++;
12745 }
12746 }
12747 tonemapCurves.curves[2] = tonemapCurveRed;
12748
12749 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12750 tonemapCurves)) {
12751 rc = BAD_VALUE;
12752 }
12753 }
12754
12755 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12756 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12757 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12758 captureIntent)) {
12759 rc = BAD_VALUE;
12760 }
12761 }
12762
12763 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12764 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12765 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12766 blackLevelLock)) {
12767 rc = BAD_VALUE;
12768 }
12769 }
12770
12771 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12772 uint8_t lensShadingMapMode =
12773 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12774 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12775 lensShadingMapMode)) {
12776 rc = BAD_VALUE;
12777 }
12778 }
12779
12780 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12781 cam_area_t roi;
12782 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012783 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012784
12785 // Map coordinate system from active array to sensor output.
12786 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12787 roi.rect.height);
12788
12789 if (scalerCropSet) {
12790 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12791 }
12792 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12793 rc = BAD_VALUE;
12794 }
12795 }
12796
12797 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12798 cam_area_t roi;
12799 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012800 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012801
12802 // Map coordinate system from active array to sensor output.
12803 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12804 roi.rect.height);
12805
12806 if (scalerCropSet) {
12807 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12808 }
12809 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12810 rc = BAD_VALUE;
12811 }
12812 }
12813
12814 // CDS for non-HFR non-video mode
12815 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12816 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12817 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12818 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12819 LOGE("Invalid CDS mode %d!", *fwk_cds);
12820 } else {
12821 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12822 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12823 rc = BAD_VALUE;
12824 }
12825 }
12826 }
12827
Thierry Strudel04e026f2016-10-10 11:27:36 -070012828 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012829 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012830 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012831 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12832 }
12833 if (m_bVideoHdrEnabled)
12834 vhdr = CAM_VIDEO_HDR_MODE_ON;
12835
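    // Note: m_bVideoHdrEnabled (maintained elsewhere in the HAL) overrides the
    // per-request QCAMERA3_VIDEO_HDR_MODE value above; the log below fires
    // only when the effective mode differs from the current feature state.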
Thierry Strudel54dc9782017-02-15 12:12:10 -080012836 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12837
12838    if (vhdr != curr_hdr_state)
12839        LOGH("PROFILE_SET_HDR_MODE %d", vhdr);
12840
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012841 rc = setVideoHdrMode(mParameters, vhdr);
12842 if (rc != NO_ERROR) {
12843        LOGE("setVideoHdrMode failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012844 }
12845
12846 //IR
12847 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12848 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12849 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012850 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12851 uint8_t isIRon = 0;
12852
12853        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012854 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12855 LOGE("Invalid IR mode %d!", fwk_ir);
12856 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012857            if (isIRon != curr_ir_state)
12858                LOGH("PROFILE_SET_IR_MODE %d", isIRon);
12859
Thierry Strudel04e026f2016-10-10 11:27:36 -070012860 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12861 CAM_INTF_META_IR_MODE, fwk_ir)) {
12862 rc = BAD_VALUE;
12863 }
12864 }
12865 }
12866
Thierry Strudel54dc9782017-02-15 12:12:10 -080012867 //Binning Correction Mode
12868 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12869 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12870 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12871 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12872 || (0 > fwk_binning_correction)) {
12873 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12874 } else {
12875 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12876 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12877 rc = BAD_VALUE;
12878 }
12879 }
12880 }
12881
Thierry Strudel269c81a2016-10-12 12:13:59 -070012882 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12883 float aec_speed;
12884 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12885 LOGD("AEC Speed :%f", aec_speed);
12886 if ( aec_speed < 0 ) {
12887 LOGE("Invalid AEC mode %f!", aec_speed);
12888 } else {
12889 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12890 aec_speed)) {
12891 rc = BAD_VALUE;
12892 }
12893 }
12894 }
12895
12896 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12897 float awb_speed;
12898 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12899 LOGD("AWB Speed :%f", awb_speed);
12900 if ( awb_speed < 0 ) {
12901 LOGE("Invalid AWB mode %f!", awb_speed);
12902 } else {
12903 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12904 awb_speed)) {
12905 rc = BAD_VALUE;
12906 }
12907 }
12908 }
12909
Thierry Strudel3d639192016-09-09 11:52:26 -070012910 // TNR
12911 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
12912 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
12913 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012914 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070012915 cam_denoise_param_t tnr;
12916 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
12917 tnr.process_plates =
12918 (cam_denoise_process_type_t)frame_settings.find(
12919 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
12920 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012921
12922        if (b_TnrRequested != curr_tnr_state)
12923            LOGH("PROFILE_SET_TNR_MODE %d", b_TnrRequested);
12924
Thierry Strudel3d639192016-09-09 11:52:26 -070012925 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
12926 rc = BAD_VALUE;
12927 }
12928 }
12929
Thierry Strudel54dc9782017-02-15 12:12:10 -080012930 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012931 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012932 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070012933 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
12934 *exposure_metering_mode)) {
12935 rc = BAD_VALUE;
12936 }
12937 }
12938
Thierry Strudel3d639192016-09-09 11:52:26 -070012939 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
12940 int32_t fwk_testPatternMode =
12941 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
12942 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
12943 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
12944
12945 if (NAME_NOT_FOUND != testPatternMode) {
12946 cam_test_pattern_data_t testPatternData;
12947 memset(&testPatternData, 0, sizeof(testPatternData));
12948 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
12949 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
12950 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
12951 int32_t *fwk_testPatternData =
12952 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
12953 testPatternData.r = fwk_testPatternData[0];
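                // Note: per the test pattern metadata definition,
                // ANDROID_SENSOR_TEST_PATTERN_DATA carries the solid color as
                // [R, Geven, Godd, B]; the switch below maps the two green
                // samples onto Gr/Gb based on the sensor color arrangement.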
12954 testPatternData.b = fwk_testPatternData[3];
12955 switch (gCamCapability[mCameraId]->color_arrangement) {
12956 case CAM_FILTER_ARRANGEMENT_RGGB:
12957 case CAM_FILTER_ARRANGEMENT_GRBG:
12958 testPatternData.gr = fwk_testPatternData[1];
12959 testPatternData.gb = fwk_testPatternData[2];
12960 break;
12961 case CAM_FILTER_ARRANGEMENT_GBRG:
12962 case CAM_FILTER_ARRANGEMENT_BGGR:
12963 testPatternData.gr = fwk_testPatternData[2];
12964 testPatternData.gb = fwk_testPatternData[1];
12965 break;
12966 default:
12967 LOGE("color arrangement %d is not supported",
12968 gCamCapability[mCameraId]->color_arrangement);
12969 break;
12970 }
12971 }
12972 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
12973 testPatternData)) {
12974 rc = BAD_VALUE;
12975 }
12976 } else {
12977 LOGE("Invalid framework sensor test pattern mode %d",
12978 fwk_testPatternMode);
12979 }
12980 }
12981
12982 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
12983 size_t count = 0;
12984 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
12985 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
12986 gps_coords.data.d, gps_coords.count, count);
12987 if (gps_coords.count != count) {
12988 rc = BAD_VALUE;
12989 }
12990 }
12991
12992 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
12993 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
12994 size_t count = 0;
12995 const char *gps_methods_src = (const char *)
12996 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
12997 memset(gps_methods, '\0', sizeof(gps_methods));
12998 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
12999 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13000 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13001 if (GPS_PROCESSING_METHOD_SIZE != count) {
13002 rc = BAD_VALUE;
13003 }
13004 }
13005
13006 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13007 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13008 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13009 gps_timestamp)) {
13010 rc = BAD_VALUE;
13011 }
13012 }
13013
13014 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13015 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13016 cam_rotation_info_t rotation_info;
13017 if (orientation == 0) {
13018 rotation_info.rotation = ROTATE_0;
13019 } else if (orientation == 90) {
13020 rotation_info.rotation = ROTATE_90;
13021 } else if (orientation == 180) {
13022 rotation_info.rotation = ROTATE_180;
13023 } else if (orientation == 270) {
13024 rotation_info.rotation = ROTATE_270;
13025 }
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013026 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013027 rotation_info.streamId = snapshotStreamId;
13028 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13029 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13030 rc = BAD_VALUE;
13031 }
13032 }
13033
13034 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13035 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13036 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13037 rc = BAD_VALUE;
13038 }
13039 }
13040
13041 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13042 uint32_t thumb_quality = (uint32_t)
13043 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13044 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13045 thumb_quality)) {
13046 rc = BAD_VALUE;
13047 }
13048 }
13049
13050 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13051 cam_dimension_t dim;
13052 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13053 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13054 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13055 rc = BAD_VALUE;
13056 }
13057 }
13058
13059 // Internal metadata
13060 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13061 size_t count = 0;
13062 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13063 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13064 privatedata.data.i32, privatedata.count, count);
13065 if (privatedata.count != count) {
13066 rc = BAD_VALUE;
13067 }
13068 }
13069
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013070 // ISO/Exposure Priority
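    // Note: QCAMERA3_SELECT_PRIORITY decides whether the 64-bit value in
    // QCAMERA3_USE_ISO_EXP_PRIORITY is interpreted as an ISO value or as a
    // manual exposure time; ZSL is enabled only while one of these priority
    // modes is active and is disabled otherwise (else-branch below).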
13071 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13072 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13073 cam_priority_mode_t mode =
13074 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13075 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13076 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13077 use_iso_exp_pty.previewOnly = FALSE;
13078 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13079 use_iso_exp_pty.value = *ptr;
13080
13081 if(CAM_ISO_PRIORITY == mode) {
13082 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13083 use_iso_exp_pty)) {
13084 rc = BAD_VALUE;
13085 }
13086 }
13087 else {
13088 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13089 use_iso_exp_pty)) {
13090 rc = BAD_VALUE;
13091 }
13092 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013093
13094 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13095 rc = BAD_VALUE;
13096 }
13097 }
13098 } else {
13099 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13100 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013101 }
13102 }
13103
13104 // Saturation
13105 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13106 int32_t* use_saturation =
13107 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13108 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13109 rc = BAD_VALUE;
13110 }
13111 }
13112
Thierry Strudel3d639192016-09-09 11:52:26 -070013113 // EV step
13114 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13115 gCamCapability[mCameraId]->exp_compensation_step)) {
13116 rc = BAD_VALUE;
13117 }
13118
13119 // CDS info
13120 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13121 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13122 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13123
13124 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13125 CAM_INTF_META_CDS_DATA, *cdsData)) {
13126 rc = BAD_VALUE;
13127 }
13128 }
13129
Shuzhen Wang19463d72016-03-08 11:09:52 -080013130 // Hybrid AE
13131 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13132 uint8_t *hybrid_ae = (uint8_t *)
13133 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013134 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13135 rc = BAD_VALUE;
13136 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013137 }
13138
Shuzhen Wang14415f52016-11-16 18:26:18 -080013139 // Histogram
13140 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13141 uint8_t histogramMode =
13142 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13143 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13144 histogramMode)) {
13145 rc = BAD_VALUE;
13146 }
13147 }
13148
13149 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13150 int32_t histogramBins =
13151 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13152 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13153 histogramBins)) {
13154 rc = BAD_VALUE;
13155 }
13156 }
13157
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013158 // Tracking AF
13159 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13160 uint8_t trackingAfTrigger =
13161 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13162 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13163 trackingAfTrigger)) {
13164 rc = BAD_VALUE;
13165 }
13166 }
13167
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013168 // Makernote
13169 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13170 if (entry.count != 0) {
13171 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13172 cam_makernote_t makernote;
13173 makernote.length = entry.count;
13174 memcpy(makernote.data, entry.data.u8, makernote.length);
13175 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13176 rc = BAD_VALUE;
13177 }
13178 } else {
13179            ALOGE("%s: Makernote length %zu is larger than %d", __FUNCTION__, entry.count,
13180 MAX_MAKERNOTE_LENGTH);
13181 rc = BAD_VALUE;
13182 }
13183 }
13184
Thierry Strudel3d639192016-09-09 11:52:26 -070013185 return rc;
13186}
13187
13188/*===========================================================================
13189 * FUNCTION : captureResultCb
13190 *
13191 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13192 *
13193 * PARAMETERS :
13194 * @metadata : metadata super buffer from mm-camera-interface
13195 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13196 * @userdata: userdata (the QCamera3HardwareInterface instance)
13197 *
13198 * RETURN : NONE
13199 *==========================================================================*/
13200void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13201 camera3_stream_buffer_t *buffer,
13202 uint32_t frame_number, bool isInputBuffer, void *userdata)
13203{
13204 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13205 if (hw == NULL) {
13206 LOGE("Invalid hw %p", hw);
13207 return;
13208 }
13209
13210 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13211 return;
13212}
13213
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013214/*===========================================================================
13215 * FUNCTION : setBufferErrorStatus
13216 *
13217 * DESCRIPTION: Callback handler for channels to report any buffer errors
13218 *
13219 * PARAMETERS :
13220 * @ch : Channel on which buffer error is reported from
13221 * @frame_number : frame number on which buffer error is reported on
13222 * @buffer_status : buffer error status
13223 * @userdata: userdata
13224 *
13225 * RETURN : NONE
13226 *==========================================================================*/
13227void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13228 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13229{
13230 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13231 if (hw == NULL) {
13232 LOGE("Invalid hw %p", hw);
13233 return;
13234 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013235
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013236 hw->setBufferErrorStatus(ch, frame_number, err);
13237 return;
13238}
13239
13240void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13241 uint32_t frameNumber, camera3_buffer_status_t err)
13242{
13243 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13244 pthread_mutex_lock(&mMutex);
13245
13246 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13247 if (req.frame_number != frameNumber)
13248 continue;
13249 for (auto& k : req.mPendingBufferList) {
13250 if(k.stream->priv == ch) {
13251 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13252 }
13253 }
13254 }
13255
13256 pthread_mutex_unlock(&mMutex);
13257 return;
13258}
Thierry Strudel3d639192016-09-09 11:52:26 -070013259/*===========================================================================
13260 * FUNCTION : initialize
13261 *
13262 * DESCRIPTION: Pass framework callback pointers to HAL
13263 *
13264 * PARAMETERS :
13265 *
13266 *
13267 * RETURN : Success : 0
13268 * Failure: -ENODEV
13269 *==========================================================================*/
13270
13271int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13272 const camera3_callback_ops_t *callback_ops)
13273{
13274 LOGD("E");
13275 QCamera3HardwareInterface *hw =
13276 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13277 if (!hw) {
13278 LOGE("NULL camera device");
13279 return -ENODEV;
13280 }
13281
13282 int rc = hw->initialize(callback_ops);
13283 LOGD("X");
13284 return rc;
13285}
13286
13287/*===========================================================================
13288 * FUNCTION : configure_streams
13289 *
13290 * DESCRIPTION: Framework entry point to configure the requested set of streams
13291 *
13292 * PARAMETERS :
13293 *
13294 *
13295 * RETURN : Success: 0
13296 * Failure: -EINVAL (if stream configuration is invalid)
13297 * -ENODEV (fatal error)
13298 *==========================================================================*/
13299
13300int QCamera3HardwareInterface::configure_streams(
13301 const struct camera3_device *device,
13302 camera3_stream_configuration_t *stream_list)
13303{
13304 LOGD("E");
13305 QCamera3HardwareInterface *hw =
13306 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13307 if (!hw) {
13308 LOGE("NULL camera device");
13309 return -ENODEV;
13310 }
13311 int rc = hw->configureStreams(stream_list);
13312 LOGD("X");
13313 return rc;
13314}
13315
13316/*===========================================================================
13317 * FUNCTION : construct_default_request_settings
13318 *
13319 * DESCRIPTION: Configure a settings buffer to meet the required use case
13320 *
13321 * PARAMETERS :
13322 *
13323 *
13324 * RETURN : Success: Return valid metadata
13325 * Failure: Return NULL
13326 *==========================================================================*/
13327const camera_metadata_t* QCamera3HardwareInterface::
13328 construct_default_request_settings(const struct camera3_device *device,
13329 int type)
13330{
13331
13332 LOGD("E");
13333 camera_metadata_t* fwk_metadata = NULL;
13334 QCamera3HardwareInterface *hw =
13335 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13336 if (!hw) {
13337 LOGE("NULL camera device");
13338 return NULL;
13339 }
13340
13341 fwk_metadata = hw->translateCapabilityToMetadata(type);
13342
13343 LOGD("X");
13344 return fwk_metadata;
13345}
13346
13347/*===========================================================================
13348 * FUNCTION : process_capture_request
13349 *
13350 * DESCRIPTION: Framework entry point for a capture request; forwards it to orchestrateRequest()
13351 *
13352 * PARAMETERS :
13353 *
13354 *
13355 * RETURN :
13356 *==========================================================================*/
13357int QCamera3HardwareInterface::process_capture_request(
13358 const struct camera3_device *device,
13359 camera3_capture_request_t *request)
13360{
13361 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013362 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013363 QCamera3HardwareInterface *hw =
13364 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13365 if (!hw) {
13366 LOGE("NULL camera device");
13367 return -EINVAL;
13368 }
13369
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013370 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013371 LOGD("X");
13372 return rc;
13373}
13374
13375/*===========================================================================
13376 * FUNCTION : dump
13377 *
13378 * DESCRIPTION: Dump HAL state; also re-reads the log level properties (see getLogLevel())
13379 *
13380 * PARAMETERS :
13381 *
13382 *
13383 * RETURN :
13384 *==========================================================================*/
13385
13386void QCamera3HardwareInterface::dump(
13387 const struct camera3_device *device, int fd)
13388{
13389 /* Log level property is read when "adb shell dumpsys media.camera" is
13390 called so that the log level can be controlled without restarting
13391 the media server */
13392 getLogLevel();
13393
13394 LOGD("E");
13395 QCamera3HardwareInterface *hw =
13396 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13397 if (!hw) {
13398 LOGE("NULL camera device");
13399 return;
13400 }
13401
13402 hw->dump(fd);
13403 LOGD("X");
13404 return;
13405}
13406
13407/*===========================================================================
13408 * FUNCTION : flush
13409 *
13410 * DESCRIPTION: Flush all in-flight requests and return the device to an idle state
13411 *
13412 * PARAMETERS :
13413 *
13414 *
13415 * RETURN :
13416 *==========================================================================*/
13417
13418int QCamera3HardwareInterface::flush(
13419 const struct camera3_device *device)
13420{
13421 int rc;
13422 LOGD("E");
13423 QCamera3HardwareInterface *hw =
13424 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13425 if (!hw) {
13426 LOGE("NULL camera device");
13427 return -EINVAL;
13428 }
13429
13430 pthread_mutex_lock(&hw->mMutex);
13431 // Validate current state
13432 switch (hw->mState) {
13433 case STARTED:
13434 /* valid state */
13435 break;
13436
13437 case ERROR:
13438 pthread_mutex_unlock(&hw->mMutex);
13439 hw->handleCameraDeviceError();
13440 return -ENODEV;
13441
13442 default:
13443 LOGI("Flush returned during state %d", hw->mState);
13444 pthread_mutex_unlock(&hw->mMutex);
13445 return 0;
13446 }
13447 pthread_mutex_unlock(&hw->mMutex);
13448
13449 rc = hw->flush(true /* restart channels */ );
13450 LOGD("X");
13451 return rc;
13452}
13453
13454/*===========================================================================
13455 * FUNCTION : close_camera_device
13456 *
13457 * DESCRIPTION: Close the camera device and free the HAL instance
13458 *
13459 * PARAMETERS :
13460 *
13461 *
13462 * RETURN :
13463 *==========================================================================*/
13464int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13465{
13466 int ret = NO_ERROR;
13467 QCamera3HardwareInterface *hw =
13468 reinterpret_cast<QCamera3HardwareInterface *>(
13469 reinterpret_cast<camera3_device_t *>(device)->priv);
13470 if (!hw) {
13471 LOGE("NULL camera device");
13472 return BAD_VALUE;
13473 }
13474
13475 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13476 delete hw;
13477 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013478 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013479 return ret;
13480}
13481
13482/*===========================================================================
13483 * FUNCTION : getWaveletDenoiseProcessPlate
13484 *
13485 * DESCRIPTION: query wavelet denoise process plate
13486 *
13487 * PARAMETERS : None
13488 *
13489 * RETURN : WNR process plate value
13490 *==========================================================================*/
13491cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13492{
13493 char prop[PROPERTY_VALUE_MAX];
13494 memset(prop, 0, sizeof(prop));
13495 property_get("persist.denoise.process.plates", prop, "0");
13496 int processPlate = atoi(prop);
13497 switch(processPlate) {
13498 case 0:
13499 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13500 case 1:
13501 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13502 case 2:
13503 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13504 case 3:
13505 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13506 default:
13507 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13508 }
13509}
13510
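// Debugging aid (a sketch; the property name and value mapping are taken from
// the switch above):
//   adb shell setprop persist.denoise.process.plates 2   # STREAMLINE_YCBCR
// Unset or out-of-range values fall back to CAM_WAVELET_DENOISE_STREAMLINE_YCBCR.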
13511
13512/*===========================================================================
13513 * FUNCTION : getTemporalDenoiseProcessPlate
13514 *
13515 * DESCRIPTION: query temporal denoise process plate
13516 *
13517 * PARAMETERS : None
13518 *
13519 * RETURN : TNR process plate value
13520 *==========================================================================*/
13521cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13522{
13523 char prop[PROPERTY_VALUE_MAX];
13524 memset(prop, 0, sizeof(prop));
13525 property_get("persist.tnr.process.plates", prop, "0");
13526 int processPlate = atoi(prop);
13527 switch(processPlate) {
13528 case 0:
13529 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13530 case 1:
13531 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13532 case 2:
13533 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13534 case 3:
13535 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13536 default:
13537 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13538 }
13539}
13540
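// Note: TNR reuses the wavelet-denoise plate enum; persist.tnr.process.plates
// follows the same 0-3 mapping as the WNR table above, with
// CAM_WAVELET_DENOISE_STREAMLINE_YCBCR as the fallback.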
13541
13542/*===========================================================================
13543 * FUNCTION : extractSceneMode
13544 *
13545 * DESCRIPTION: Extract scene mode from framework-set metadata
13546 *
13547 * PARAMETERS :
13548 * @frame_settings: CameraMetadata reference
13549 * @metaMode: ANDROID_CONTROL_MODE value from the framework settings
13550 * @hal_metadata: hal metadata structure
13551 *
13552 * RETURN : NO_ERROR on success; error code otherwise
13553 *==========================================================================*/
13554int32_t QCamera3HardwareInterface::extractSceneMode(
13555 const CameraMetadata &frame_settings, uint8_t metaMode,
13556 metadata_buffer_t *hal_metadata)
13557{
13558 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013559 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13560
13561 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13562 LOGD("Ignoring control mode OFF_KEEP_STATE");
13563 return NO_ERROR;
13564 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013565
13566 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13567 camera_metadata_ro_entry entry =
13568 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13569 if (0 == entry.count)
13570 return rc;
13571
13572 uint8_t fwk_sceneMode = entry.data.u8[0];
13573
13574 int val = lookupHalName(SCENE_MODES_MAP,
13575 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13576 fwk_sceneMode);
13577 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013578 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013579 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013580 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013581 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013582
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013583 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13584 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13585 }
13586
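    // Note: when sensor HDR is not in use, an HDR scene mode falls back to
    // multi-frame bracketing (CAM_INTF_PARM_HAL_BRACKETING_HDR) and the scene
    // mode is forwarded as the bestshot mode; mForceHdrSnapshot further below
    // forces the same bracketing irrespective of the scene mode.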
13587 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13588 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013589 cam_hdr_param_t hdr_params;
13590 hdr_params.hdr_enable = 1;
13591 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13592 hdr_params.hdr_need_1x = false;
13593 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13594 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13595 rc = BAD_VALUE;
13596 }
13597 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013598
Thierry Strudel3d639192016-09-09 11:52:26 -070013599 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13600 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13601 rc = BAD_VALUE;
13602 }
13603 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013604
13605 if (mForceHdrSnapshot) {
13606 cam_hdr_param_t hdr_params;
13607 hdr_params.hdr_enable = 1;
13608 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13609 hdr_params.hdr_need_1x = false;
13610 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13611 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13612 rc = BAD_VALUE;
13613 }
13614 }
13615
Thierry Strudel3d639192016-09-09 11:52:26 -070013616 return rc;
13617}
13618
13619/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013620 * FUNCTION : setVideoHdrMode
13621 *
13622 * DESCRIPTION: Set Video HDR mode from framework-set metadata
13623 *
13624 * PARAMETERS :
13625 * @hal_metadata: hal metadata structure
13626 * @vhdr: requested video HDR mode (QCAMERA3_VIDEO_HDR_MODE)
13627 *
13628 * RETURN : NO_ERROR on success; BAD_VALUE for an invalid mode
13629 *==========================================================================*/
13630int32_t QCamera3HardwareInterface::setVideoHdrMode(
13631 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13632{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013633 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13634 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13635 }
13636
13637 LOGE("Invalid Video HDR mode %d!", vhdr);
13638 return BAD_VALUE;
13639}
13640
13641/*===========================================================================
13642 * FUNCTION : setSensorHDR
13643 *
13644 * DESCRIPTION: Enable/disable sensor HDR.
13645 *
13646 * PARAMETERS :
13647 * @hal_metadata: hal metadata structure
13648 * @enable: boolean whether to enable/disable sensor HDR
13649 *
13650 * RETURN : NO_ERROR on success; error code otherwise
13651 *==========================================================================*/
13652int32_t QCamera3HardwareInterface::setSensorHDR(
13653 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13654{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013655 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013656 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13657
13658 if (enable) {
13659 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13660 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13661 #ifdef _LE_CAMERA_
13662 //Default to staggered HDR for IOT
13663 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13664 #else
13665 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13666 #endif
13667 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
13668 }
13669
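    // Note: the property value is cast directly to cam_sensor_hdr_type_t (the
    // LE build above defaults to 3, i.e. staggered HDR); modes the sensor does
    // not support are rejected by the capability checks below.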
13670 bool isSupported = false;
13671 switch (sensor_hdr) {
13672 case CAM_SENSOR_HDR_IN_SENSOR:
13673 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13674 CAM_QCOM_FEATURE_SENSOR_HDR) {
13675 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013676 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013677 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013678 break;
13679 case CAM_SENSOR_HDR_ZIGZAG:
13680 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13681 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13682 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013683 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013684 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013685 break;
13686 case CAM_SENSOR_HDR_STAGGERED:
13687 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13688 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13689 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013690 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013691 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013692 break;
13693 case CAM_SENSOR_HDR_OFF:
13694 isSupported = true;
13695 LOGD("Turning off sensor HDR");
13696 break;
13697 default:
13698 LOGE("HDR mode %d not supported", sensor_hdr);
13699 rc = BAD_VALUE;
13700 break;
13701 }
13702
13703 if(isSupported) {
13704 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13705 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13706 rc = BAD_VALUE;
13707 } else {
13708 if(!isVideoHdrEnable)
13709 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013710 }
13711 }
13712 return rc;
13713}
13714
13715/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013716 * FUNCTION : needRotationReprocess
13717 *
13718 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13719 *
13720 * PARAMETERS : none
13721 *
13722 * RETURN : true: needed
13723 * false: no need
13724 *==========================================================================*/
13725bool QCamera3HardwareInterface::needRotationReprocess()
13726{
13727 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13728        // pp has the capability to process rotation; use the reprocess path
13729        LOGH("need to do reprocess for rotation");
13730 return true;
13731 }
13732
13733 return false;
13734}
13735
13736/*===========================================================================
13737 * FUNCTION : needReprocess
13738 *
13739 * DESCRIPTION: if reprocess is needed
13740 *
13741 * PARAMETERS : none
13742 *
13743 * RETURN : true: needed
13744 * false: no need
13745 *==========================================================================*/
13746bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13747{
13748 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13749 // TODO: add for ZSL HDR later
13750 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13751 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13752            LOGH("need to do reprocess for ZSL WNR or min PP reprocess");
13753 return true;
13754 } else {
13755 LOGH("already post processed frame");
13756 return false;
13757 }
13758 }
13759 return needRotationReprocess();
13760}
13761
13762/*===========================================================================
13763 * FUNCTION : needJpegExifRotation
13764 *
13765 * DESCRIPTION: if rotation from jpeg is needed
13766 *
13767 * PARAMETERS : none
13768 *
13769 * RETURN : true: needed
13770 * false: no need
13771 *==========================================================================*/
13772bool QCamera3HardwareInterface::needJpegExifRotation()
13773{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013774 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013775 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13776        LOGD("Need to use Jpeg EXIF rotation");
13777 return true;
13778 }
13779 return false;
13780}
13781
13782/*===========================================================================
13783 * FUNCTION : addOfflineReprocChannel
13784 *
13785 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13786 * coming from input channel
13787 *
13788 * PARAMETERS :
13789 * @config : reprocess configuration
13790 * @inputChHandle : pointer to the input (source) channel
13791 *
13792 *
13793 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13794 *==========================================================================*/
13795QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13796 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13797{
13798 int32_t rc = NO_ERROR;
13799 QCamera3ReprocessChannel *pChannel = NULL;
13800
13801 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013802 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13803 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013804 if (NULL == pChannel) {
13805 LOGE("no mem for reprocess channel");
13806 return NULL;
13807 }
13808
13809 rc = pChannel->initialize(IS_TYPE_NONE);
13810 if (rc != NO_ERROR) {
13811 LOGE("init reprocess channel failed, ret = %d", rc);
13812 delete pChannel;
13813 return NULL;
13814 }
13815
13816 // pp feature config
13817 cam_pp_feature_config_t pp_config;
13818 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13819
13820 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13821 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13822 & CAM_QCOM_FEATURE_DSDN) {
13823        // Use CPP CDS in case h/w supports it.
13824 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13825 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13826 }
13827 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13828 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13829 }
13830
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013831 if (config.hdr_param.hdr_enable) {
13832 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13833 pp_config.hdr_param = config.hdr_param;
13834 }
13835
13836 if (mForceHdrSnapshot) {
13837 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13838 pp_config.hdr_param.hdr_enable = 1;
13839 pp_config.hdr_param.hdr_need_1x = 0;
13840 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13841 }
13842
Thierry Strudel3d639192016-09-09 11:52:26 -070013843 rc = pChannel->addReprocStreamsFromSource(pp_config,
13844 config,
13845 IS_TYPE_NONE,
13846 mMetadataChannel);
13847
13848 if (rc != NO_ERROR) {
13849 delete pChannel;
13850 return NULL;
13851 }
13852 return pChannel;
13853}
13854
13855/*===========================================================================
13856 * FUNCTION : getMobicatMask
13857 *
13858 * DESCRIPTION: returns mobicat mask
13859 *
13860 * PARAMETERS : none
13861 *
13862 * RETURN : mobicat mask
13863 *
13864 *==========================================================================*/
13865uint8_t QCamera3HardwareInterface::getMobicatMask()
13866{
13867 return m_MobicatMask;
13868}
13869
13870/*===========================================================================
13871 * FUNCTION : setMobicat
13872 *
13873 * DESCRIPTION: set Mobicat on/off.
13874 *
13875 * PARAMETERS :
13876 * @params : none
13877 *
13878 * RETURN : int32_t type of status
13879 * NO_ERROR -- success
13880 * non-zero failure code
13881 *==========================================================================*/
13882int32_t QCamera3HardwareInterface::setMobicat()
13883{
Thierry Strudel3d639192016-09-09 11:52:26 -070013884 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013885
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013886 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013887 tune_cmd_t tune_cmd;
13888 tune_cmd.type = SET_RELOAD_CHROMATIX;
13889 tune_cmd.module = MODULE_ALL;
13890 tune_cmd.value = TRUE;
13891 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13892 CAM_INTF_PARM_SET_VFE_COMMAND,
13893 tune_cmd);
13894
13895 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13896 CAM_INTF_PARM_SET_PP_COMMAND,
13897 tune_cmd);
13898 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013899
13900 return ret;
13901}
13902
13903/*===========================================================================
13904* FUNCTION : getLogLevel
13905*
13906* DESCRIPTION: Reads the log level property into a variable
13907*
13908* PARAMETERS :
13909* None
13910*
13911* RETURN :
13912* None
13913*==========================================================================*/
13914void QCamera3HardwareInterface::getLogLevel()
13915{
13916 char prop[PROPERTY_VALUE_MAX];
13917 uint32_t globalLogLevel = 0;
13918
13919 property_get("persist.camera.hal.debug", prop, "0");
13920 int val = atoi(prop);
13921 if (0 <= val) {
13922 gCamHal3LogLevel = (uint32_t)val;
13923 }
13924
Thierry Strudel9ec39c62016-12-28 11:30:05 -080013925 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070013926 gKpiDebugLevel = atoi(prop);
13927
13928 property_get("persist.camera.global.debug", prop, "0");
13929 val = atoi(prop);
13930 if (0 <= val) {
13931 globalLogLevel = (uint32_t)val;
13932 }
13933
13934 /* Highest log level among hal.logs and global.logs is selected */
13935 if (gCamHal3LogLevel < globalLogLevel)
13936 gCamHal3LogLevel = globalLogLevel;
13937
13938 return;
13939}
13940
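// Debugging aid (a sketch; property names are taken from getLogLevel() above):
// the effective HAL log level is the higher of the two properties, and both
// are re-read whenever dump() runs, e.g. via "adb shell dumpsys media.camera":
//   adb shell setprop persist.camera.hal.debug 2
//   adb shell setprop persist.camera.global.debug 1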
13941/*===========================================================================
13942 * FUNCTION : validateStreamRotations
13943 *
13944 * DESCRIPTION: Check if the rotations requested are supported
13945 *
13946 * PARAMETERS :
13947 * @stream_list : streams to be configured
13948 *
13949 * RETURN : NO_ERROR on success
13950 * -EINVAL on failure
13951 *
13952 *==========================================================================*/
13953int QCamera3HardwareInterface::validateStreamRotations(
13954 camera3_stream_configuration_t *streamList)
13955{
13956 int rc = NO_ERROR;
13957
13958 /*
13959 * Loop through all streams requested in configuration
13960 * Check if unsupported rotations have been requested on any of them
13961 */
13962 for (size_t j = 0; j < streamList->num_streams; j++){
13963 camera3_stream_t *newStream = streamList->streams[j];
13964
Emilian Peev35ceeed2017-06-29 11:58:56 -070013965 switch(newStream->rotation) {
13966 case CAMERA3_STREAM_ROTATION_0:
13967 case CAMERA3_STREAM_ROTATION_90:
13968 case CAMERA3_STREAM_ROTATION_180:
13969 case CAMERA3_STREAM_ROTATION_270:
13970 //Expected values
13971 break;
13972 default:
13973 ALOGE("%s: Error: Unsupported rotation of %d requested for stream"
13974                    " type:%d and stream format:%d", __func__,
13975 newStream->rotation, newStream->stream_type,
13976 newStream->format);
13977 return -EINVAL;
13978 }
13979
Thierry Strudel3d639192016-09-09 11:52:26 -070013980 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
13981 bool isImplDef = (newStream->format ==
13982 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
13983 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
13984 isImplDef);
13985
13986 if (isRotated && (!isImplDef || isZsl)) {
13987 LOGE("Error: Unsupported rotation of %d requested for stream"
13988                    " type:%d and stream format:%d",
13989 newStream->rotation, newStream->stream_type,
13990 newStream->format);
13991 rc = -EINVAL;
13992 break;
13993 }
13994 }
13995
13996 return rc;
13997}
13998
13999/*===========================================================================
14000* FUNCTION : getFlashInfo
14001*
14002* DESCRIPTION: Retrieve information about whether the device has a flash.
14003*
14004* PARAMETERS :
14005* @cameraId : Camera id to query
14006* @hasFlash : Boolean indicating whether there is a flash device
14007* associated with given camera
14008* @flashNode : If a flash device exists, this will be its device node.
14009*
14010* RETURN :
14011* None
14012*==========================================================================*/
14013void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14014 bool& hasFlash,
14015 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14016{
14017 cam_capability_t* camCapability = gCamCapability[cameraId];
14018 if (NULL == camCapability) {
14019 hasFlash = false;
14020 flashNode[0] = '\0';
14021 } else {
14022 hasFlash = camCapability->flash_available;
14023 strlcpy(flashNode,
14024 (char*)camCapability->flash_dev_name,
14025 QCAMERA_MAX_FILEPATH_LENGTH);
14026 }
14027}
14028
14029/*===========================================================================
14030* FUNCTION : getEepromVersionInfo
14031*
14032* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14033*
14034* PARAMETERS : None
14035*
14036* RETURN : string describing EEPROM version
14037* "\0" if no such info available
14038*==========================================================================*/
14039const char *QCamera3HardwareInterface::getEepromVersionInfo()
14040{
14041 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14042}
14043
14044/*===========================================================================
14045* FUNCTION : getLdafCalib
14046*
14047* DESCRIPTION: Retrieve Laser AF calibration data
14048*
14049* PARAMETERS : None
14050*
14051* RETURN : Two uint32_t describing laser AF calibration data
14052* NULL if none is available.
14053*==========================================================================*/
14054const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14055{
14056 if (mLdafCalibExist) {
14057 return &mLdafCalib[0];
14058 } else {
14059 return NULL;
14060 }
14061}
14062
14063/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014064* FUNCTION : getEaselFwVersion
14065*
14066* DESCRIPTION: Retrieve Easel firmware version
14067*
14068* PARAMETERS : None
14069*
14070* RETURN : string describing Firmware version
14071* "\0" if Easel manager client is not open
14072*==========================================================================*/
14073const char *QCamera3HardwareInterface::getEaselFwVersion()
14074{
14075 int rc = NO_ERROR;
14076
14077 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
14078 ALOGD("%s: Querying Easel firmware version", __FUNCTION__);
14079 if (EaselManagerClientOpened) {
14080 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
14081 if (rc != OK)
14082 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
14083 else
14084 return (const char *)&mEaselFwVersion[0];
14085 }
14086 return NULL;
14087}
14088
14089/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014090 * FUNCTION : dynamicUpdateMetaStreamInfo
14091 *
14092 * DESCRIPTION: This function:
14093 * (1) stops all the channels
14094 * (2) returns error on pending requests and buffers
14095 * (3) sends metastream_info in setparams
14096 * (4) starts all channels
14097 * This is useful when sensor has to be restarted to apply any
14098 * settings such as frame rate from a different sensor mode
14099 *
14100 * PARAMETERS : None
14101 *
14102 * RETURN : NO_ERROR on success
14103 * Error codes on failure
14104 *
14105 *==========================================================================*/
14106int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14107{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014108 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014109 int rc = NO_ERROR;
14110
14111 LOGD("E");
14112
14113 rc = stopAllChannels();
14114 if (rc < 0) {
14115 LOGE("stopAllChannels failed");
14116 return rc;
14117 }
14118
14119 rc = notifyErrorForPendingRequests();
14120 if (rc < 0) {
14121 LOGE("notifyErrorForPendingRequests failed");
14122 return rc;
14123 }
14124
14125 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14126 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x"
14127                " Format:%d",
14128 mStreamConfigInfo.type[i],
14129 mStreamConfigInfo.stream_sizes[i].width,
14130 mStreamConfigInfo.stream_sizes[i].height,
14131 mStreamConfigInfo.postprocess_mask[i],
14132 mStreamConfigInfo.format[i]);
14133 }
14134
14135 /* Send meta stream info once again so that ISP can start */
14136 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14137 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14138 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14139 mParameters);
14140 if (rc < 0) {
14141 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14142 }
14143
14144 rc = startAllChannels();
14145 if (rc < 0) {
14146 LOGE("startAllChannels failed");
14147 return rc;
14148 }
14149
14150 LOGD("X");
14151 return rc;
14152}
14153
14154/*===========================================================================
14155 * FUNCTION : stopAllChannels
14156 *
14157 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14158 *
14159 * PARAMETERS : None
14160 *
14161 * RETURN : NO_ERROR on success
14162 * Error codes on failure
14163 *
14164 *==========================================================================*/
14165int32_t QCamera3HardwareInterface::stopAllChannels()
14166{
14167 int32_t rc = NO_ERROR;
14168
14169 LOGD("Stopping all channels");
14170 // Stop the Streams/Channels
14171 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14172 it != mStreamInfo.end(); it++) {
14173 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14174 if (channel) {
14175 channel->stop();
14176 }
14177 (*it)->status = INVALID;
14178 }
14179
14180 if (mSupportChannel) {
14181 mSupportChannel->stop();
14182 }
14183 if (mAnalysisChannel) {
14184 mAnalysisChannel->stop();
14185 }
14186 if (mRawDumpChannel) {
14187 mRawDumpChannel->stop();
14188 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014189 if (mHdrPlusRawSrcChannel) {
14190 mHdrPlusRawSrcChannel->stop();
14191 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014192 if (mMetadataChannel) {
14193 /* If content of mStreamInfo is not 0, there is metadata stream */
14194 mMetadataChannel->stop();
14195 }
14196
14197 LOGD("All channels stopped");
14198 return rc;
14199}
14200
14201/*===========================================================================
14202 * FUNCTION : startAllChannels
14203 *
14204 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14205 *
14206 * PARAMETERS : None
14207 *
14208 * RETURN : NO_ERROR on success
14209 * Error codes on failure
14210 *
14211 *==========================================================================*/
14212int32_t QCamera3HardwareInterface::startAllChannels()
14213{
14214 int32_t rc = NO_ERROR;
14215
14216 LOGD("Start all channels ");
14217 // Start the Streams/Channels
14218 if (mMetadataChannel) {
14219 /* If content of mStreamInfo is not 0, there is metadata stream */
14220 rc = mMetadataChannel->start();
14221 if (rc < 0) {
14222 LOGE("META channel start failed");
14223 return rc;
14224 }
14225 }
14226 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14227 it != mStreamInfo.end(); it++) {
14228 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14229 if (channel) {
14230 rc = channel->start();
14231 if (rc < 0) {
14232 LOGE("channel start failed");
14233 return rc;
14234 }
14235 }
14236 }
14237 if (mAnalysisChannel) {
14238 mAnalysisChannel->start();
14239 }
14240 if (mSupportChannel) {
14241 rc = mSupportChannel->start();
14242 if (rc < 0) {
14243 LOGE("Support channel start failed");
14244 return rc;
14245 }
14246 }
14247 if (mRawDumpChannel) {
14248 rc = mRawDumpChannel->start();
14249 if (rc < 0) {
14250 LOGE("RAW dump channel start failed");
14251 return rc;
14252 }
14253 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014254 if (mHdrPlusRawSrcChannel) {
14255 rc = mHdrPlusRawSrcChannel->start();
14256 if (rc < 0) {
14257 LOGE("HDR+ RAW channel start failed");
14258 return rc;
14259 }
14260 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014261
14262 LOGD("All channels started");
14263 return rc;
14264}
14265
14266/*===========================================================================
14267 * FUNCTION : notifyErrorForPendingRequests
14268 *
14269 * DESCRIPTION: This function sends error for all the pending requests/buffers
14270 *
14271 * PARAMETERS : None
14272 *
14273 * RETURN : Error codes
14274 * NO_ERROR on success
14275 *
14276 *==========================================================================*/
14277int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14278{
Emilian Peev7650c122017-01-19 08:24:33 -080014279 notifyErrorFoPendingDepthData(mDepthChannel);
14280
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014281 auto pendingRequest = mPendingRequestsList.begin();
14282 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014283
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014284 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14285 // buffers (for which buffers aren't sent yet).
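    // For each frame number there are three cases handled below:
    //   1. Result metadata already sent, buffers still pending -> send ERROR_BUFFER and
    //      return the buffers with an error status.
    //   2. Buffers already sent, result metadata still pending -> send ERROR_RESULT.
    //   3. Neither sent yet -> send ERROR_REQUEST and return the buffers with an error status.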
14286 while (pendingRequest != mPendingRequestsList.end() ||
14287 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14288 if (pendingRequest == mPendingRequestsList.end() ||
14289 pendingBuffer->frame_number < pendingRequest->frame_number) {
14290             // If the metadata for this frame was already sent, notify about a buffer error and
14291             // return the buffers with an error status.
14292 for (auto &info : pendingBuffer->mPendingBufferList) {
14293 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014294 camera3_notify_msg_t notify_msg;
14295 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14296 notify_msg.type = CAMERA3_MSG_ERROR;
14297 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014298 notify_msg.message.error.error_stream = info.stream;
14299 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014300 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014301
14302 camera3_stream_buffer_t buffer = {};
14303 buffer.acquire_fence = -1;
14304 buffer.release_fence = -1;
14305 buffer.buffer = info.buffer;
14306 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14307 buffer.stream = info.stream;
14308 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014309 }
14310
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014311 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14312 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14313 pendingBuffer->frame_number > pendingRequest->frame_number) {
14314 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014315 camera3_notify_msg_t notify_msg;
14316 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14317 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014318 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14319 notify_msg.message.error.error_stream = nullptr;
14320 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014321 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014322
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014323 if (pendingRequest->input_buffer != nullptr) {
14324 camera3_capture_result result = {};
14325 result.frame_number = pendingRequest->frame_number;
14326 result.result = nullptr;
14327 result.input_buffer = pendingRequest->input_buffer;
14328 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014329 }
14330
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014331 mShutterDispatcher.clear(pendingRequest->frame_number);
14332 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14333 } else {
14334             // If neither the buffers nor the result metadata have been sent yet, notify about a
14335             // request error and return the buffers with an error status.
14336 for (auto &info : pendingBuffer->mPendingBufferList) {
14337 camera3_notify_msg_t notify_msg;
14338 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14339 notify_msg.type = CAMERA3_MSG_ERROR;
14340 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14341 notify_msg.message.error.error_stream = info.stream;
14342 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14343 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014344
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014345 camera3_stream_buffer_t buffer = {};
14346 buffer.acquire_fence = -1;
14347 buffer.release_fence = -1;
14348 buffer.buffer = info.buffer;
14349 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14350 buffer.stream = info.stream;
14351 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14352 }
14353
14354 if (pendingRequest->input_buffer != nullptr) {
14355 camera3_capture_result result = {};
14356 result.frame_number = pendingRequest->frame_number;
14357 result.result = nullptr;
14358 result.input_buffer = pendingRequest->input_buffer;
14359 orchestrateResult(&result);
14360 }
14361
14362 mShutterDispatcher.clear(pendingRequest->frame_number);
14363 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14364 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014365 }
14366 }
14367
14368     /* Reset the pending frame drop list and request list */
14369 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014370 mShutterDispatcher.clear();
14371 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014372 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014373 mExpectedFrameDuration = 0;
14374 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014375 LOGH("Cleared all the pending buffers ");
14376
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014377 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014378}
14379
14380bool QCamera3HardwareInterface::isOnEncoder(
14381 const cam_dimension_t max_viewfinder_size,
14382 uint32_t width, uint32_t height)
14383{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014384 return ((width > (uint32_t)max_viewfinder_size.width) ||
14385 (height > (uint32_t)max_viewfinder_size.height) ||
14386 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14387 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014388}
14389
14390/*===========================================================================
14391 * FUNCTION : setBundleInfo
14392 *
14393  * DESCRIPTION: Set bundle info for all streams that are bundled.
14394 *
14395 * PARAMETERS : None
14396 *
14397 * RETURN : NO_ERROR on success
14398 * Error codes on failure
14399 *==========================================================================*/
14400int32_t QCamera3HardwareInterface::setBundleInfo()
14401{
14402 int32_t rc = NO_ERROR;
14403
14404 if (mChannelHandle) {
14405 cam_bundle_config_t bundleInfo;
14406 memset(&bundleInfo, 0, sizeof(bundleInfo));
14407 rc = mCameraHandle->ops->get_bundle_info(
14408 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14409 if (rc != NO_ERROR) {
14410 LOGE("get_bundle_info failed");
14411 return rc;
14412 }
14413 if (mAnalysisChannel) {
14414 mAnalysisChannel->setBundleInfo(bundleInfo);
14415 }
14416 if (mSupportChannel) {
14417 mSupportChannel->setBundleInfo(bundleInfo);
14418 }
14419 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14420 it != mStreamInfo.end(); it++) {
14421 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14422 channel->setBundleInfo(bundleInfo);
14423 }
14424 if (mRawDumpChannel) {
14425 mRawDumpChannel->setBundleInfo(bundleInfo);
14426 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014427 if (mHdrPlusRawSrcChannel) {
14428 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14429 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014430 }
14431
14432 return rc;
14433}
14434
14435/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014436 * FUNCTION : setInstantAEC
14437 *
14438 * DESCRIPTION: Set Instant AEC related params.
14439 *
14440 * PARAMETERS :
14441 * @meta: CameraMetadata reference
14442 *
14443 * RETURN : NO_ERROR on success
14444 * Error codes on failure
14445 *==========================================================================*/
14446int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14447{
14448 int32_t rc = NO_ERROR;
14449 uint8_t val = 0;
14450 char prop[PROPERTY_VALUE_MAX];
14451
14452 // First try to configure instant AEC from framework metadata
14453 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14454 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14455 }
14456
14457     // If the framework did not set this value, try to read it from the system property.
14458 if (val == 0) {
14459 memset(prop, 0, sizeof(prop));
14460 property_get("persist.camera.instant.aec", prop, "0");
14461 val = (uint8_t)atoi(prop);
14462 }
14463
14464 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14465 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14466 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14467 mInstantAEC = val;
14468 mInstantAECSettledFrameNumber = 0;
14469 mInstantAecFrameIdxCount = 0;
14470 LOGH("instantAEC value set %d",val);
14471 if (mInstantAEC) {
14472 memset(prop, 0, sizeof(prop));
14473 property_get("persist.camera.ae.instant.bound", prop, "10");
14474 int32_t aec_frame_skip_cnt = atoi(prop);
14475 if (aec_frame_skip_cnt >= 0) {
14476 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14477 } else {
14478 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14479 rc = BAD_VALUE;
14480 }
14481 }
14482 } else {
14483 LOGE("Bad instant aec value set %d", val);
14484 rc = BAD_VALUE;
14485 }
14486 return rc;
14487}
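// Usage sketch (illustrative values, not part of the HAL): when the framework does not set
// QCAMERA3_INSTANT_AEC_MODE, instant AEC can be enabled through the properties read above, e.g.
//   adb shell setprop persist.camera.instant.aec 1
//   adb shell setprop persist.camera.ae.instant.bound 10
// where the first value must fall inside [CAM_AEC_NORMAL_CONVERGENCE, CAM_AEC_CONVERGENCE_MAX)
// and the second is the number of frames skipped for display while AEC settles.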
14488
14489/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014490 * FUNCTION : get_num_overall_buffers
14491 *
14492 * DESCRIPTION: Estimate number of pending buffers across all requests.
14493 *
14494 * PARAMETERS : None
14495 *
14496 * RETURN : Number of overall pending buffers
14497 *
14498 *==========================================================================*/
14499uint32_t PendingBuffersMap::get_num_overall_buffers()
14500{
14501 uint32_t sum_buffers = 0;
14502 for (auto &req : mPendingBuffersInRequest) {
14503 sum_buffers += req.mPendingBufferList.size();
14504 }
14505 return sum_buffers;
14506}
14507
14508/*===========================================================================
14509 * FUNCTION : removeBuf
14510 *
14511 * DESCRIPTION: Remove a matching buffer from tracker.
14512 *
14513 * PARAMETERS : @buffer: image buffer for the callback
14514 *
14515 * RETURN : None
14516 *
14517 *==========================================================================*/
14518void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14519{
14520 bool buffer_found = false;
14521 for (auto req = mPendingBuffersInRequest.begin();
14522 req != mPendingBuffersInRequest.end(); req++) {
14523 for (auto k = req->mPendingBufferList.begin();
14524 k != req->mPendingBufferList.end(); k++ ) {
14525 if (k->buffer == buffer) {
14526 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14527 req->frame_number, buffer);
14528 k = req->mPendingBufferList.erase(k);
14529 if (req->mPendingBufferList.empty()) {
14530 // Remove this request from Map
14531 req = mPendingBuffersInRequest.erase(req);
14532 }
14533 buffer_found = true;
14534 break;
14535 }
14536 }
14537 if (buffer_found) {
14538 break;
14539 }
14540 }
14541 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14542 get_num_overall_buffers());
14543}
14544
14545/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014546 * FUNCTION : getBufErrStatus
14547 *
14548 * DESCRIPTION: get buffer error status
14549 *
14550 * PARAMETERS : @buffer: buffer handle
14551 *
14552 * RETURN : Error status
14553 *
14554 *==========================================================================*/
14555int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14556{
14557 for (auto& req : mPendingBuffersInRequest) {
14558 for (auto& k : req.mPendingBufferList) {
14559 if (k.buffer == buffer)
14560 return k.bufStatus;
14561 }
14562 }
14563 return CAMERA3_BUFFER_STATUS_OK;
14564}
14565
14566/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014567 * FUNCTION : setPAAFSupport
14568 *
14569 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14570 * feature mask according to stream type and filter
14571 * arrangement
14572 *
14573 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14574 * @stream_type: stream type
14575 * @filter_arrangement: filter arrangement
14576 *
14577 * RETURN : None
14578 *==========================================================================*/
14579void QCamera3HardwareInterface::setPAAFSupport(
14580 cam_feature_mask_t& feature_mask,
14581 cam_stream_type_t stream_type,
14582 cam_color_filter_arrangement_t filter_arrangement)
14583{
Thierry Strudel3d639192016-09-09 11:52:26 -070014584 switch (filter_arrangement) {
14585 case CAM_FILTER_ARRANGEMENT_RGGB:
14586 case CAM_FILTER_ARRANGEMENT_GRBG:
14587 case CAM_FILTER_ARRANGEMENT_GBRG:
14588 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014589 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14590 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014591 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014592 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14593 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014594 }
14595 break;
14596 case CAM_FILTER_ARRANGEMENT_Y:
14597 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14598 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14599 }
14600 break;
14601 default:
14602 break;
14603 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014604 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14605 feature_mask, stream_type, filter_arrangement);
14606
14607
Thierry Strudel3d639192016-09-09 11:52:26 -070014608}
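// In short: for Bayer sensors (RGGB/GRBG/GBRG/BGGR) PAAF is enabled on preview, analysis and
// video streams unless the QTI PPEISCORE feature is already set in the mask; for mono (Y-only)
// sensors it is enabled only on the analysis stream.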
14609
14610/*===========================================================================
14611* FUNCTION : getSensorMountAngle
14612*
14613* DESCRIPTION: Retrieve sensor mount angle
14614*
14615* PARAMETERS : None
14616*
14617* RETURN : sensor mount angle in uint32_t
14618*==========================================================================*/
14619uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14620{
14621 return gCamCapability[mCameraId]->sensor_mount_angle;
14622}
14623
14624/*===========================================================================
14625* FUNCTION : getRelatedCalibrationData
14626*
14627* DESCRIPTION: Retrieve related system calibration data
14628*
14629* PARAMETERS : None
14630*
14631* RETURN     : Pointer to the related system calibration data
14632*==========================================================================*/
14633const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14634{
14635 return (const cam_related_system_calibration_data_t *)
14636 &(gCamCapability[mCameraId]->related_cam_calibration);
14637}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014638
14639/*===========================================================================
14640 * FUNCTION : is60HzZone
14641 *
14642 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14643 *
14644 * PARAMETERS : None
14645 *
14646 * RETURN : True if in 60Hz zone, False otherwise
14647 *==========================================================================*/
14648bool QCamera3HardwareInterface::is60HzZone()
14649{
14650 time_t t = time(NULL);
14651 struct tm lt;
14652
14653 struct tm* r = localtime_r(&t, &lt);
14654
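    // Heuristic: treat UTC offsets of -2 hours or less, or +8 hours or more, as 60Hz regions
    // and everything in between as 50Hz; if the local time cannot be obtained, default to 60Hz.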
14655 if (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60)
14656 return true;
14657 else
14658 return false;
14659}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014660
14661/*===========================================================================
14662 * FUNCTION : adjustBlackLevelForCFA
14663 *
14664  * DESCRIPTION: Reorder the black level pattern from RGGB order to the order of the
14665  *              sensor's Bayer CFA (Color Filter Array).
14666 *
14667 * PARAMETERS : @input: black level pattern in the order of RGGB
14668 * @output: black level pattern in the order of CFA
14669 * @color_arrangement: CFA color arrangement
14670 *
14671 * RETURN : None
14672 *==========================================================================*/
14673template<typename T>
14674void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14675 T input[BLACK_LEVEL_PATTERN_CNT],
14676 T output[BLACK_LEVEL_PATTERN_CNT],
14677 cam_color_filter_arrangement_t color_arrangement)
14678{
14679 switch (color_arrangement) {
14680 case CAM_FILTER_ARRANGEMENT_GRBG:
14681 output[0] = input[1];
14682 output[1] = input[0];
14683 output[2] = input[3];
14684 output[3] = input[2];
14685 break;
14686 case CAM_FILTER_ARRANGEMENT_GBRG:
14687 output[0] = input[2];
14688 output[1] = input[3];
14689 output[2] = input[0];
14690 output[3] = input[1];
14691 break;
14692 case CAM_FILTER_ARRANGEMENT_BGGR:
14693 output[0] = input[3];
14694 output[1] = input[2];
14695 output[2] = input[1];
14696 output[3] = input[0];
14697 break;
14698 case CAM_FILTER_ARRANGEMENT_RGGB:
14699 output[0] = input[0];
14700 output[1] = input[1];
14701 output[2] = input[2];
14702 output[3] = input[3];
14703 break;
14704 default:
14705 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14706 break;
14707 }
14708}
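// Worked example (values chosen only for illustration): with an RGGB-ordered input of
// {R=64, Gr=65, Gb=66, B=67}, adjustBlackLevelForCFA() produces
//   GRBG: {65, 64, 67, 66}    GBRG: {66, 67, 64, 65}
//   BGGR: {67, 66, 65, 64}    RGGB: {64, 65, 66, 67}
// i.e. the four black level values are simply permuted to follow the sensor's CFA layout.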
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014709
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014710void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14711 CameraMetadata &resultMetadata,
14712 std::shared_ptr<metadata_buffer_t> settings)
14713{
14714 if (settings == nullptr) {
14715 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14716 return;
14717 }
14718
14719 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14720 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14721 }
14722
14723 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14724 String8 str((const char *)gps_methods);
14725 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14726 }
14727
14728 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14729 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14730 }
14731
14732 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14733 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14734 }
14735
14736 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14737 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14738 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14739 }
14740
14741 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14742 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14743 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14744 }
14745
14746 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14747 int32_t fwk_thumb_size[2];
14748 fwk_thumb_size[0] = thumb_size->width;
14749 fwk_thumb_size[1] = thumb_size->height;
14750 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14751 }
14752
14753 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14754 uint8_t fwk_intent = intent[0];
14755 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14756 }
14757}
14758
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014759bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14760 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014761 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14762 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14763 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014764 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014765 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014766 return false;
14767 }
14768
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014769 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014770 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14771 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014772 return false;
14773 }
14774
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014775 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14776 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14777 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14778 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14779 return false;
14780 }
14781
14782 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14783 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14784 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14785 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14786 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14787 return false;
14788 }
14789
14790 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14791 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14792 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14793 return false;
14794 }
14795
14796 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14797 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14798 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14799 ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
14800 return false;
14801 }
14802
14803 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14804 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14805 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14806 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14807 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14808 return false;
14809 }
14810
14811 // TODO (b/32585046): support non-ZSL.
14812 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14813 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14814 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14815 return false;
14816 }
14817
14818 // TODO (b/32586081): support flash.
14819 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14820 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14821 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14822 return false;
14823 }
14824
14825 // TODO (b/36492953): support digital zoom.
14826 if (!metadata.exists(ANDROID_SCALER_CROP_REGION) ||
14827 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[0] != 0 ||
14828 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[1] != 0 ||
14829 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[2] !=
14830 gCamCapability[mCameraId]->active_array_size.width ||
14831 metadata.find(ANDROID_SCALER_CROP_REGION).data.i32[3] !=
14832 gCamCapability[mCameraId]->active_array_size.height) {
14833 ALOGV("%s: ANDROID_SCALER_CROP_REGION is not the same as active array region.",
14834 __FUNCTION__);
14835 return false;
14836 }
14837
14838 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14839 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14840 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14841 return false;
14842 }
14843
14844 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014845 if (request.num_output_buffers != 1 ||
14846 request.output_buffers[0].stream->format != HAL_PIXEL_FORMAT_BLOB) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014847 ALOGV("%s: Not an HDR+ request: Only Jpeg output is supported.", __FUNCTION__);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014848 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014849 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014850                     request.output_buffers[i].stream->width,
14851                     request.output_buffers[i].stream->height,
14852                     request.output_buffers[i].stream->format);
14853 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014854 return false;
14855 }
14856
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014857 return true;
14858}
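// Summary of the checks above: a request is treated as HDR+ compatible only when the noise
// reduction, edge, aberration and tonemap modes are HIGH_QUALITY, AE is ON or ON_AUTO_FLASH,
// AWB is AUTO, the effect mode is OFF, the control mode is AUTO or USE_SCENE_MODE, ZSL is
// enabled, flash is OFF, the crop region equals the full active array, and the request has
// exactly one BLOB (JPEG) output buffer.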
14859
14860bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14861 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14862 const CameraMetadata &metadata)
14863{
14864 if (hdrPlusRequest == nullptr) return false;
14865 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14866
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014867 // Get a YUV buffer from pic channel.
14868 QCamera3PicChannel *picChannel = (QCamera3PicChannel*)request.output_buffers[0].stream->priv;
14869 auto yuvBuffer = std::make_shared<mm_camera_buf_def_t>();
14870 status_t res = picChannel->getYuvBufferForRequest(yuvBuffer.get(), request.frame_number);
14871 if (res != OK) {
14872 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
14873 __FUNCTION__, strerror(-res), res);
14874 return false;
14875 }
14876
14877 pbcamera::StreamBuffer buffer;
14878 buffer.streamId = kPbYuvOutputStreamId;
Chien-Yu Chenb0f68922017-03-08 11:37:13 -080014879 buffer.dmaBufFd = yuvBuffer->fd;
Chien-Yu Chencec36ed2017-07-21 13:54:29 -070014880 buffer.data = yuvBuffer->fd == -1 ? yuvBuffer->buffer : nullptr;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014881 buffer.dataSize = yuvBuffer->frame_len;
14882
14883 pbcamera::CaptureRequest pbRequest;
14884 pbRequest.id = request.frame_number;
14885 pbRequest.outputBuffers.push_back(buffer);
14886
14887 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070014888 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014889 if (res != OK) {
14890 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
14891 strerror(-res), res);
14892 return false;
14893 }
14894
14895 hdrPlusRequest->yuvBuffer = yuvBuffer;
14896 hdrPlusRequest->frameworkOutputBuffers.push_back(request.output_buffers[0]);
14897
14898 return true;
14899}
14900
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070014901status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
14902{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014903 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
14904 return OK;
14905 }
14906
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014907 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014908 if (res != OK) {
14909 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
14910 strerror(-res), res);
14911 return res;
14912 }
14913 gHdrPlusClientOpening = true;
14914
14915 return OK;
14916}
14917
Chien-Yu Chenee335912017-02-09 17:53:20 -080014918status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
14919{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014920 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014921
Chien-Yu Chena6c99062017-05-23 13:45:06 -070014922 if (mHdrPlusModeEnabled) {
14923 return OK;
14924 }
14925
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014926 // Check if gHdrPlusClient is opened or being opened.
14927 if (gHdrPlusClient == nullptr) {
14928 if (gHdrPlusClientOpening) {
14929 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
14930 return OK;
14931 }
14932
14933 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014934 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014935 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
14936 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014937 return res;
14938 }
14939
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070014940 // When opening HDR+ client completes, HDR+ mode will be enabled.
14941 return OK;
14942
Chien-Yu Chenee335912017-02-09 17:53:20 -080014943 }
14944
14945     // Configure streams for HDR+.
14946 res = configureHdrPlusStreamsLocked();
14947 if (res != OK) {
14948 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014949 return res;
14950 }
14951
14952 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
14953 res = gHdrPlusClient->setZslHdrPlusMode(true);
14954 if (res != OK) {
14955 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080014956 return res;
14957 }
14958
14959 mHdrPlusModeEnabled = true;
14960 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
14961
14962 return OK;
14963}
14964
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070014965void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
14966{
14967 if (gHdrPlusClientOpening) {
14968 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
14969 }
14970}
14971
Chien-Yu Chenee335912017-02-09 17:53:20 -080014972void QCamera3HardwareInterface::disableHdrPlusModeLocked()
14973{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014974 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080014975 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070014976 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
14977 if (res != OK) {
14978 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
14979 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014980
14981 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070014982 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070014983 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080014984 }
14985
14986 mHdrPlusModeEnabled = false;
14987 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
14988}
14989
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070014990bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
14991{
14992 // Check if mPictureChannel is valid.
14993 // TODO: Support YUV (b/36693254) and RAW (b/36690506)
14994 if (mPictureChannel == nullptr) {
14995 return false;
14996 }
14997
14998 return true;
14999}
15000
Chien-Yu Chenee335912017-02-09 17:53:20 -080015001status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015002{
15003 pbcamera::InputConfiguration inputConfig;
15004 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15005 status_t res = OK;
15006
15007 // Configure HDR+ client streams.
15008 // Get input config.
15009 if (mHdrPlusRawSrcChannel) {
15010 // HDR+ input buffers will be provided by HAL.
15011 res = fillPbStreamConfig(&inputConfig.streamConfig, kPbRaw10InputStreamId,
15012 HAL_PIXEL_FORMAT_RAW10, mHdrPlusRawSrcChannel, /*stream index*/0);
15013 if (res != OK) {
15014 LOGE("%s: Failed to get fill stream config for HDR+ raw src stream: %s (%d)",
15015 __FUNCTION__, strerror(-res), res);
15016 return res;
15017 }
15018
15019 inputConfig.isSensorInput = false;
15020 } else {
15021 // Sensor MIPI will send data to Easel.
15022 inputConfig.isSensorInput = true;
Chien-Yu Chen8bea7192017-03-01 13:48:05 -080015023 inputConfig.sensorMode.cameraId = mCameraId;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015024 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15025 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15026 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15027 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15028 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
Yin-Chia Yeheeb10422017-05-23 11:37:46 -070015029 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015030 if (mSensorModeInfo.num_raw_bits != 10) {
15031 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15032 mSensorModeInfo.num_raw_bits);
15033 return BAD_VALUE;
15034 }
15035
15036 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015037 }
15038
15039 // Get output configurations.
15040 // Easel may need to output RAW16 buffers if mRawChannel was created.
Chien-Yu Chenee335912017-02-09 17:53:20 -080015041 // TODO: handle RAW16 outputs.
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015042
15043 // Easel may need to output YUV output buffers if mPictureChannel was created.
15044 pbcamera::StreamConfiguration yuvOutputConfig;
15045 if (mPictureChannel != nullptr) {
15046 res = fillPbStreamConfig(&yuvOutputConfig, kPbYuvOutputStreamId,
15047 HAL_PIXEL_FORMAT_YCrCb_420_SP, mPictureChannel, /*stream index*/0);
15048 if (res != OK) {
15049 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15050 __FUNCTION__, strerror(-res), res);
15051
15052 return res;
15053 }
15054
15055 outputStreamConfigs.push_back(yuvOutputConfig);
15056 }
15057
15058 // TODO: consider other channels for YUV output buffers.
15059
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015060 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015061 if (res != OK) {
15062 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15063 strerror(-res), res);
15064 return res;
15065 }
15066
15067 return OK;
15068}
15069
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015070void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15071{
15072 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15073 // Set HAL state to error.
15074 pthread_mutex_lock(&mMutex);
15075 mState = ERROR;
15076 pthread_mutex_unlock(&mMutex);
15077
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015078 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015079}
15080
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015081void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15082{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015083 if (client == nullptr) {
15084 ALOGE("%s: Opened client is null.", __FUNCTION__);
15085 return;
15086 }
15087
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015088 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015089 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15090
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015091 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015092 if (!gHdrPlusClientOpening) {
15093 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15094 return;
15095 }
15096
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015097 gHdrPlusClient = std::move(client);
15098 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015099 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015100
15101 // Set static metadata.
15102 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15103 if (res != OK) {
15104 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15105 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015106 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015107 gHdrPlusClient = nullptr;
15108 return;
15109 }
15110
15111 // Enable HDR+ mode.
15112 res = enableHdrPlusModeLocked();
15113 if (res != OK) {
15114 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15115 }
15116}
15117
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015118void QCamera3HardwareInterface::onOpenFailed(status_t err)
15119{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015120 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015121 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015122 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015123 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015124}
15125
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015126void QCamera3HardwareInterface::onFatalError()
15127{
15128 ALOGE("%s: HDR+ client has a fatal error.", __FUNCTION__);
15129
15130 // Set HAL state to error.
15131 pthread_mutex_lock(&mMutex);
15132 mState = ERROR;
15133 pthread_mutex_unlock(&mMutex);
15134
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015135 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015136}
15137
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015138void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15139{
15140 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15141 __LINE__, requestId, apSensorTimestampNs);
15142
15143 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15144}
15145
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015146void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15147{
15148 pthread_mutex_lock(&mMutex);
15149
15150 // Find the pending request for this result metadata.
15151 auto requestIter = mPendingRequestsList.begin();
15152 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15153 requestIter++;
15154 }
15155
15156 if (requestIter == mPendingRequestsList.end()) {
15157 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15158 pthread_mutex_unlock(&mMutex);
15159 return;
15160 }
15161
15162 requestIter->partial_result_cnt++;
15163
15164 CameraMetadata metadata;
15165 uint8_t ready = true;
15166 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15167
15168 // Send it to framework.
15169 camera3_capture_result_t result = {};
15170
15171 result.result = metadata.getAndLock();
15172 // Populate metadata result
15173 result.frame_number = requestId;
15174 result.num_output_buffers = 0;
15175 result.output_buffers = NULL;
15176 result.partial_result = requestIter->partial_result_cnt;
15177
15178 orchestrateResult(&result);
15179 metadata.unlock(result.result);
15180
15181 pthread_mutex_unlock(&mMutex);
15182}
15183
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015184void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15185 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15186 uint32_t stride, int32_t format)
15187{
15188 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15189 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15190 __LINE__, width, height, requestId);
15191 char buf[FILENAME_MAX] = {};
15192 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15193 requestId, width, height);
15194
15195 pbcamera::StreamConfiguration config = {};
15196 config.image.width = width;
15197 config.image.height = height;
15198 config.image.format = format;
15199
15200 pbcamera::PlaneConfiguration plane = {};
15201 plane.stride = stride;
15202 plane.scanline = height;
15203
15204 config.image.planes.push_back(plane);
15205
15206 pbcamera::StreamBuffer buffer = {};
15207 buffer.streamId = 0;
15208 buffer.dmaBufFd = -1;
15209 buffer.data = postview->data();
15210 buffer.dataSize = postview->size();
15211
15212 hdrplus_client_utils::writePpm(buf, config, buffer);
15213 }
15214
15215 pthread_mutex_lock(&mMutex);
15216
15217 // Find the pending request for this result metadata.
15218 auto requestIter = mPendingRequestsList.begin();
15219 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15220 requestIter++;
15221 }
15222
15223 if (requestIter == mPendingRequestsList.end()) {
15224 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15225 pthread_mutex_unlock(&mMutex);
15226 return;
15227 }
15228
15229 requestIter->partial_result_cnt++;
15230
15231 CameraMetadata metadata;
15232 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15233 static_cast<int32_t>(stride)};
15234 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15235 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15236
15237 // Send it to framework.
15238 camera3_capture_result_t result = {};
15239
15240 result.result = metadata.getAndLock();
15241 // Populate metadata result
15242 result.frame_number = requestId;
15243 result.num_output_buffers = 0;
15244 result.output_buffers = NULL;
15245 result.partial_result = requestIter->partial_result_cnt;
15246
15247 orchestrateResult(&result);
15248 metadata.unlock(result.result);
15249
15250 pthread_mutex_unlock(&mMutex);
15251}
15252
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015253void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015254 const camera_metadata_t &resultMetadata)
15255{
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015256 if (result != nullptr) {
15257 if (result->outputBuffers.size() != 1) {
15258 ALOGE("%s: Number of output buffers (%u) is not supported.", __FUNCTION__,
15259 result->outputBuffers.size());
15260 return;
15261 }
15262
15263 if (result->outputBuffers[0].streamId != kPbYuvOutputStreamId) {
15264 ALOGE("%s: Only YUV output stream is supported. (stream id %d).", __FUNCTION__,
15265 result->outputBuffers[0].streamId);
15266 return;
15267 }
15268
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015269 // TODO (b/34854987): initiate this from HDR+ service.
15270 onNextCaptureReady(result->requestId);
15271
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015272 // Find the pending HDR+ request.
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015273 HdrPlusPendingRequest pendingRequest;
15274 {
15275 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15276 auto req = mHdrPlusPendingRequests.find(result->requestId);
15277 pendingRequest = req->second;
15278 }
15279
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015280 // Update the result metadata with the settings of the HDR+ still capture request because
15281 // the result metadata belongs to a ZSL buffer.
15282 CameraMetadata metadata;
15283 metadata = &resultMetadata;
15284 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15285 camera_metadata_t* updatedResultMetadata = metadata.release();
15286
15287 QCamera3PicChannel *picChannel =
15288 (QCamera3PicChannel*)pendingRequest.frameworkOutputBuffers[0].stream->priv;
15289
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015290 // Check if dumping HDR+ YUV output is enabled.
15291 char prop[PROPERTY_VALUE_MAX];
15292 property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
15293 bool dumpYuvOutput = atoi(prop);
15294
15295 if (dumpYuvOutput) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015296 // Dump yuv buffer to a ppm file.
15297 pbcamera::StreamConfiguration outputConfig;
15298 status_t rc = fillPbStreamConfig(&outputConfig, kPbYuvOutputStreamId,
15299 HAL_PIXEL_FORMAT_YCrCb_420_SP, picChannel, /*stream index*/0);
15300 if (rc == OK) {
15301 char buf[FILENAME_MAX] = {};
15302 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
15303 result->requestId, result->outputBuffers[0].streamId,
15304 outputConfig.image.width, outputConfig.image.height);
15305
15306 hdrplus_client_utils::writePpm(buf, outputConfig, result->outputBuffers[0]);
15307 } else {
15308 LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: %s (%d).",
15309 __FUNCTION__, strerror(-rc), rc);
15310 }
15311 }
15312
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015313 uint32_t halStreamId = picChannel->getStreamID(picChannel->getStreamTypeMask());
15314 auto halMetadata = std::make_shared<metadata_buffer_t>();
15315 clear_metadata_buffer(halMetadata.get());
15316
15317 // Convert updated result metadata to HAL metadata and return the yuv buffer for Jpeg
15318 // encoding.
15319 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15320 halStreamId, /*minFrameDuration*/0);
15321 if (res == OK) {
15322 // Return the buffer to pic channel for encoding.
15323 picChannel->returnYuvBufferAndEncode(pendingRequest.yuvBuffer.get(),
15324 pendingRequest.frameworkOutputBuffers[0].buffer, result->requestId,
15325 halMetadata);
15326 } else {
15327 // Return the buffer without encoding.
15328 // TODO: This should not happen but we may want to report an error buffer to camera
15329 // service.
15330 picChannel->returnYuvBuffer(pendingRequest.yuvBuffer.get());
15331 ALOGE("%s: Translate framework metadata to HAL metadata failed: %s (%d).", __FUNCTION__,
15332 strerror(-res), res);
15333 }
15334
15335 // Send HDR+ metadata to framework.
15336 {
15337 pthread_mutex_lock(&mMutex);
15338
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015339 // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
15340 handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
Chien-Yu Chen92724a82017-01-06 11:50:30 -080015341 pthread_mutex_unlock(&mMutex);
15342 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015343
15344 // Remove the HDR+ pending request.
15345 {
15346 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15347 auto req = mHdrPlusPendingRequests.find(result->requestId);
15348 mHdrPlusPendingRequests.erase(req);
15349 }
15350 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015351}
15352
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015353void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
15354{
15355 if (failedResult == nullptr) {
15356 ALOGE("%s: Got an empty failed result.", __FUNCTION__);
15357 return;
15358 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015359
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015360 ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015361
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015362 // Remove the pending HDR+ request.
15363 {
15364 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
15365 auto pendingRequest = mHdrPlusPendingRequests.find(failedResult->requestId);
15366
15367 // Return the buffer to pic channel.
15368 QCamera3PicChannel *picChannel =
15369 (QCamera3PicChannel*)pendingRequest->second.frameworkOutputBuffers[0].stream->priv;
15370 picChannel->returnYuvBuffer(pendingRequest->second.yuvBuffer.get());
15371
15372 mHdrPlusPendingRequests.erase(pendingRequest);
15373 }
15374
15375 pthread_mutex_lock(&mMutex);
15376
15377 // Find the pending buffers.
15378 auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
15379 while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15380 if (pendingBuffers->frame_number == failedResult->requestId) {
15381 break;
15382 }
15383 pendingBuffers++;
15384 }
15385
15386 // Send out buffer errors for the pending buffers.
15387 if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
15388 std::vector<camera3_stream_buffer_t> streamBuffers;
15389 for (auto &buffer : pendingBuffers->mPendingBufferList) {
15390 // Prepare a stream buffer.
15391 camera3_stream_buffer_t streamBuffer = {};
15392 streamBuffer.stream = buffer.stream;
15393 streamBuffer.buffer = buffer.buffer;
15394 streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
15395 streamBuffer.acquire_fence = -1;
15396 streamBuffer.release_fence = -1;
15397
15398 streamBuffers.push_back(streamBuffer);
15399
15400 // Send out error buffer event.
15401 camera3_notify_msg_t notify_msg = {};
15402 notify_msg.type = CAMERA3_MSG_ERROR;
15403 notify_msg.message.error.frame_number = pendingBuffers->frame_number;
15404 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
15405 notify_msg.message.error.error_stream = buffer.stream;
15406
15407 orchestrateNotify(&notify_msg);
15408 }
15409
15410 camera3_capture_result_t result = {};
15411 result.frame_number = pendingBuffers->frame_number;
15412 result.num_output_buffers = streamBuffers.size();
15413 result.output_buffers = &streamBuffers[0];
15414
15415 // Send out result with buffer errors.
15416 orchestrateResult(&result);
15417
15418 // Remove pending buffers.
15419 mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
15420 }
15421
15422 // Remove pending request.
15423 auto halRequest = mPendingRequestsList.begin();
15424 while (halRequest != mPendingRequestsList.end()) {
15425 if (halRequest->frame_number == failedResult->requestId) {
15426 mPendingRequestsList.erase(halRequest);
15427 break;
15428 }
15429 halRequest++;
15430 }
15431
15432 pthread_mutex_unlock(&mMutex);
Chien-Yu Chen8e599492016-11-01 13:37:46 -070015433}
15434
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015435
15436ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
15437 mParent(parent) {}
15438
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015439void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015440{
15441 std::lock_guard<std::mutex> lock(mLock);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015442
15443 if (isReprocess) {
15444 mReprocessShutters.emplace(frameNumber, Shutter());
15445 } else {
15446 mShutters.emplace(frameNumber, Shutter());
15447 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015448}
15449
15450void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
15451{
15452 std::lock_guard<std::mutex> lock(mLock);
15453
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015454 std::map<uint32_t, Shutter> *shutters = nullptr;
15455
15456 // Find the shutter entry.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015457 auto shutter = mShutters.find(frameNumber);
15458 if (shutter == mShutters.end()) {
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015459 shutter = mReprocessShutters.find(frameNumber);
15460 if (shutter == mReprocessShutters.end()) {
15461 // Shutter was already sent.
15462 return;
15463 }
15464 shutters = &mReprocessShutters;
15465 } else {
15466 shutters = &mShutters;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015467 }
15468
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015469 // Make this frame's shutter ready.
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015470 shutter->second.ready = true;
15471 shutter->second.timestamp = timestamp;
15472
15473     // Iterate through the shutters and send them out until reaching one that is not ready yet.
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015474 shutter = shutters->begin();
15475 while (shutter != shutters->end()) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015476 if (!shutter->second.ready) {
15477 // If this shutter is not ready, the following shutters can't be sent.
15478 break;
15479 }
15480
15481 camera3_notify_msg_t msg = {};
15482 msg.type = CAMERA3_MSG_SHUTTER;
15483 msg.message.shutter.frame_number = shutter->first;
15484 msg.message.shutter.timestamp = shutter->second.timestamp;
15485 mParent->orchestrateNotify(&msg);
15486
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015487 shutter = shutters->erase(shutter);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015488 }
15489}
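// Example of the in-order delivery implemented above (frame numbers are illustrative): with
// shutters expected for frames 10, 11 and 12, marking 11 ready sends nothing; marking 10 ready
// then sends the shutters for 10 and 11 back-to-back, while 12 keeps waiting for its own
// timestamp. Regular and reprocess shutters are ordered independently in their own maps.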
15490
15491void ShutterDispatcher::clear(uint32_t frameNumber)
15492{
15493 std::lock_guard<std::mutex> lock(mLock);
15494 mShutters.erase(frameNumber);
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015495 mReprocessShutters.erase(frameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015496}
15497
15498void ShutterDispatcher::clear()
15499{
15500 std::lock_guard<std::mutex> lock(mLock);
15501
15502 // Log errors for stale shutters.
15503 for (auto &shutter : mShutters) {
15504 ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
15505 __FUNCTION__, shutter.first, shutter.second.ready,
15506 shutter.second.timestamp);
15507 }
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015508
15509 // Log errors for stale reprocess shutters.
15510 for (auto &shutter : mReprocessShutters) {
15511 ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
15512 __FUNCTION__, shutter.first, shutter.second.ready,
15513 shutter.second.timestamp);
15514 }
15515
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015516 mShutters.clear();
Chien-Yu Chena7f98612017-06-20 16:54:10 -070015517 mReprocessShutters.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070015518}
15519
15520OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
15521 mParent(parent) {}
15522
15523status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
15524{
15525 std::lock_guard<std::mutex> lock(mLock);
15526 mStreamBuffers.clear();
15527 if (!streamList) {
15528 ALOGE("%s: streamList is nullptr.", __FUNCTION__);
15529 return -EINVAL;
15530 }
15531
15532 // Create a "frame-number -> buffer" map for each stream.
15533 for (uint32_t i = 0; i < streamList->num_streams; i++) {
15534 mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
15535 }
15536
15537 return OK;
15538}
15539
15540status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
15541{
15542 std::lock_guard<std::mutex> lock(mLock);
15543
15544 // Find the "frame-number -> buffer" map for the stream.
15545 auto buffers = mStreamBuffers.find(stream);
15546 if (buffers == mStreamBuffers.end()) {
15547 ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
15548 return -EINVAL;
15549 }
15550
15551 // Create an unready buffer for this frame number.
15552 buffers->second.emplace(frameNumber, Buffer());
15553 return OK;
15554}
15555
15556void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
15557 const camera3_stream_buffer_t &buffer)
15558{
15559 std::lock_guard<std::mutex> lock(mLock);
15560
15561 // Find the frame number -> buffer map for the stream.
15562 auto buffers = mStreamBuffers.find(buffer.stream);
15563 if (buffers == mStreamBuffers.end()) {
15564 ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
15565 return;
15566 }
15567
15568     // Find the unready buffer for this frame number and mark it ready.
15569 auto pendingBuffer = buffers->second.find(frameNumber);
15570 if (pendingBuffer == buffers->second.end()) {
15571 ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
15572 return;
15573 }
15574
15575 pendingBuffer->second.ready = true;
15576 pendingBuffer->second.buffer = buffer;
15577
15578     // Iterate through the buffers and send them out until reaching one that is not ready yet.
15579 pendingBuffer = buffers->second.begin();
15580 while (pendingBuffer != buffers->second.end()) {
15581 if (!pendingBuffer->second.ready) {
15582 // If this buffer is not ready, the following buffers can't be sent.
15583 break;
15584 }
15585
15586 camera3_capture_result_t result = {};
15587 result.frame_number = pendingBuffer->first;
15588 result.num_output_buffers = 1;
15589 result.output_buffers = &pendingBuffer->second.buffer;
15590
15591         // Send out the result containing the ready buffer.
15592 mParent->orchestrateResult(&result);
15593
15594 pendingBuffer = buffers->second.erase(pendingBuffer);
15595 }
15596}
15597
15598void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
15599{
15600 std::lock_guard<std::mutex> lock(mLock);
15601
15602 // Log errors for stale buffers.
15603 for (auto &buffers : mStreamBuffers) {
15604 for (auto &buffer : buffers.second) {
15605 ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
15606 __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
15607 }
15608 buffers.second.clear();
15609 }
15610
15611 if (clearConfiguredStreams) {
15612 mStreamBuffers.clear();
15613 }
15614}
15615
Thierry Strudel3d639192016-09-09 11:52:26 -070015616}; //end namespace qcamera