/* Copyright (c) 2012-2016, The Linux Foundation. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*     * Redistributions of source code must retain the above copyright
*       notice, this list of conditions and the following disclaimer.
*     * Redistributions in binary form must reproduce the above
*       copyright notice, this list of conditions and the following
*       disclaimer in the documentation and/or other materials provided
*       with the distribution.
*     * Neither the name of The Linux Foundation nor the names of its
*       contributors may be used to endorse or promote products derived
*       from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/

#define LOG_TAG "QCamera3HWI"
//#define LOG_NDEBUG 0

#define __STDC_LIMIT_MACROS

// To remove
#include <cutils/properties.h>

// System dependencies
#include <dlfcn.h>
#include <fcntl.h>
#include <stdio.h>
#include <stdlib.h>
#include "utils/Timers.h"
#include "sys/ioctl.h"
#include <time.h>
#include <sync/sync.h>
#include "gralloc_priv.h"
#include <map>

// Display dependencies
#include "qdMetaData.h"

// Camera dependencies
#include "android/QCamera3External.h"
#include "util/QCameraFlash.h"
#include "QCamera3HWI.h"
#include "QCamera3VendorTags.h"
#include "QCameraTrace.h"

#include "HdrPlusClientUtils.h"

extern "C" {
#include "mm_camera_dbg.h"
}
#include "cam_cond.h"

using ::android::hardware::camera::common::V1_0::helper::CameraMetadata;
using namespace android;

namespace qcamera {

#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )

#define EMPTY_PIPELINE_DELAY 2
// mm_camera has 2 partial results: 3A, and final result.
// HDR+ requests have 3 partial results: postview, next request ready, and final result.
#define PARTIAL_RESULT_COUNT 3
#define FRAME_SKIP_DELAY 0

#define MAX_VALUE_8BIT ((1<<8)-1)
#define MAX_VALUE_10BIT ((1<<10)-1)
#define MAX_VALUE_12BIT ((1<<12)-1)

#define VIDEO_4K_WIDTH 3840
#define VIDEO_4K_HEIGHT 2160

#define MAX_EIS_WIDTH 3840
#define MAX_EIS_HEIGHT 2160

#define MAX_RAW_STREAMS 1
#define MAX_STALLING_STREAMS 1
#define MAX_PROCESSED_STREAMS 3
/* Batch mode is enabled only if FPS set is equal to or greater than this */
#define MIN_FPS_FOR_BATCH_MODE (120)
#define PREVIEW_FPS_FOR_HFR (30)
#define DEFAULT_VIDEO_FPS (30.0)
#define TEMPLATE_MAX_PREVIEW_FPS (30.0)
#define MAX_HFR_BATCH_SIZE (8)
#define REGIONS_TUPLE_COUNT 5
// Thresholds (in seconds) for detecting missing request buffers
#define MISSING_REQUEST_BUF_TIMEOUT 5
#define MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT 30
#define FLUSH_TIMEOUT 3
#define METADATA_MAP_SIZE(MAP) (sizeof(MAP)/sizeof(MAP[0]))

#define CAM_QCOM_FEATURE_PP_SUPERSET_HAL3 ( CAM_QCOM_FEATURE_DENOISE2D |\
                                            CAM_QCOM_FEATURE_CROP |\
                                            CAM_QCOM_FEATURE_ROTATION |\
                                            CAM_QCOM_FEATURE_SHARPNESS |\
                                            CAM_QCOM_FEATURE_SCALE |\
                                            CAM_QCOM_FEATURE_CAC |\
                                            CAM_QCOM_FEATURE_CDS )
/* Per configuration size for static metadata length */
#define PER_CONFIGURATION_SIZE_3 (3)

#define TIMEOUT_NEVER -1

/* Face rect indices */
#define FACE_LEFT 0
#define FACE_TOP 1
#define FACE_RIGHT 2
#define FACE_BOTTOM 3
#define FACE_WEIGHT 4

/* Face landmarks indices */
#define LEFT_EYE_X 0
#define LEFT_EYE_Y 1
#define RIGHT_EYE_X 2
#define RIGHT_EYE_Y 3
#define MOUTH_X 4
#define MOUTH_Y 5
#define TOTAL_LANDMARK_INDICES 6

// Max preferred zoom
#define MAX_PREFERRED_ZOOM_RATIO 7.0

// Whether to check for the GPU stride padding, or use the default
//#define CHECK_GPU_PIXEL_ALIGNMENT

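// Per-camera capability tables and cached static metadata, indexed by camera id and shared
// across camera sessions.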
cam_capability_t *gCamCapability[MM_CAMERA_MAX_NUM_SENSORS];
const camera_metadata_t *gStaticMetadata[MM_CAMERA_MAX_NUM_SENSORS];
extern pthread_mutex_t gCamLock;
volatile uint32_t gCamHal3LogLevel = 1;
extern uint8_t gNumCameraSessions;

// Note that this doesn't support concurrent front and back camera b/35960155.
// The following Easel related variables must be protected by gHdrPlusClientLock.
std::unique_ptr<EaselManagerClient> gEaselManagerClient;
bool EaselManagerClientOpened = false; // If gEaselManagerClient is opened.
std::unique_ptr<HdrPlusClient> gHdrPlusClient = nullptr;
bool gHdrPlusClientOpening = false; // If HDR+ client is being opened.
std::condition_variable gHdrPlusClientOpenCond; // Used to synchronize HDR+ client opening.
bool gEaselProfilingEnabled = false; // If Easel profiling is enabled.
bool gExposeEnableZslKey = false; // If HAL makes android.control.enableZsl available.
bool gEnableMultipleHdrplusOutputs = false; // Whether to enable multiple output from Easel HDR+.

// If Easel is in bypass only mode. If true, Easel HDR+ won't be enabled.
bool gEaselBypassOnly;

std::mutex gHdrPlusClientLock; // Protect above Easel related variables.


const QCamera3HardwareInterface::QCameraPropMap QCamera3HardwareInterface::CDS_MAP [] = {
    {"On", CAM_CDS_MODE_ON},
    {"Off", CAM_CDS_MODE_OFF},
    {"Auto",CAM_CDS_MODE_AUTO}
};
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_video_hdr_mode_t,
        cam_video_hdr_mode_t> QCamera3HardwareInterface::VIDEO_HDR_MODES_MAP[] = {
    { QCAMERA3_VIDEO_HDR_MODE_OFF, CAM_VIDEO_HDR_MODE_OFF },
    { QCAMERA3_VIDEO_HDR_MODE_ON, CAM_VIDEO_HDR_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_binning_correction_mode_t,
        cam_binning_correction_mode_t> QCamera3HardwareInterface::BINNING_CORRECTION_MODES_MAP[] = {
    { QCAMERA3_BINNING_CORRECTION_MODE_OFF, CAM_BINNING_CORRECTION_MODE_OFF },
    { QCAMERA3_BINNING_CORRECTION_MODE_ON, CAM_BINNING_CORRECTION_MODE_ON }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_ir_mode_t,
        cam_ir_mode_type_t> QCamera3HardwareInterface::IR_MODES_MAP [] = {
    {QCAMERA3_IR_MODE_OFF, CAM_IR_MODE_OFF},
    {QCAMERA3_IR_MODE_ON, CAM_IR_MODE_ON},
    {QCAMERA3_IR_MODE_AUTO, CAM_IR_MODE_AUTO}
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_effect_mode_t,
        cam_effect_mode_type> QCamera3HardwareInterface::EFFECT_MODES_MAP[] = {
    { ANDROID_CONTROL_EFFECT_MODE_OFF, CAM_EFFECT_MODE_OFF },
    { ANDROID_CONTROL_EFFECT_MODE_MONO, CAM_EFFECT_MODE_MONO },
    { ANDROID_CONTROL_EFFECT_MODE_NEGATIVE, CAM_EFFECT_MODE_NEGATIVE },
    { ANDROID_CONTROL_EFFECT_MODE_SOLARIZE, CAM_EFFECT_MODE_SOLARIZE },
    { ANDROID_CONTROL_EFFECT_MODE_SEPIA, CAM_EFFECT_MODE_SEPIA },
    { ANDROID_CONTROL_EFFECT_MODE_POSTERIZE, CAM_EFFECT_MODE_POSTERIZE },
    { ANDROID_CONTROL_EFFECT_MODE_WHITEBOARD, CAM_EFFECT_MODE_WHITEBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_BLACKBOARD, CAM_EFFECT_MODE_BLACKBOARD },
    { ANDROID_CONTROL_EFFECT_MODE_AQUA, CAM_EFFECT_MODE_AQUA }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_awb_mode_t,
        cam_wb_mode_type> QCamera3HardwareInterface::WHITE_BALANCE_MODES_MAP[] = {
    { ANDROID_CONTROL_AWB_MODE_OFF, CAM_WB_MODE_OFF },
    { ANDROID_CONTROL_AWB_MODE_AUTO, CAM_WB_MODE_AUTO },
    { ANDROID_CONTROL_AWB_MODE_INCANDESCENT, CAM_WB_MODE_INCANDESCENT },
    { ANDROID_CONTROL_AWB_MODE_FLUORESCENT, CAM_WB_MODE_FLUORESCENT },
    { ANDROID_CONTROL_AWB_MODE_WARM_FLUORESCENT,CAM_WB_MODE_WARM_FLUORESCENT},
    { ANDROID_CONTROL_AWB_MODE_DAYLIGHT, CAM_WB_MODE_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_CLOUDY_DAYLIGHT, CAM_WB_MODE_CLOUDY_DAYLIGHT },
    { ANDROID_CONTROL_AWB_MODE_TWILIGHT, CAM_WB_MODE_TWILIGHT },
    { ANDROID_CONTROL_AWB_MODE_SHADE, CAM_WB_MODE_SHADE }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_scene_mode_t,
        cam_scene_mode_type> QCamera3HardwareInterface::SCENE_MODES_MAP[] = {
    { ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY, CAM_SCENE_MODE_FACE_PRIORITY },
    { ANDROID_CONTROL_SCENE_MODE_ACTION, CAM_SCENE_MODE_ACTION },
    { ANDROID_CONTROL_SCENE_MODE_PORTRAIT, CAM_SCENE_MODE_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_LANDSCAPE, CAM_SCENE_MODE_LANDSCAPE },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT, CAM_SCENE_MODE_NIGHT },
    { ANDROID_CONTROL_SCENE_MODE_NIGHT_PORTRAIT, CAM_SCENE_MODE_NIGHT_PORTRAIT },
    { ANDROID_CONTROL_SCENE_MODE_THEATRE, CAM_SCENE_MODE_THEATRE },
    { ANDROID_CONTROL_SCENE_MODE_BEACH, CAM_SCENE_MODE_BEACH },
    { ANDROID_CONTROL_SCENE_MODE_SNOW, CAM_SCENE_MODE_SNOW },
    { ANDROID_CONTROL_SCENE_MODE_SUNSET, CAM_SCENE_MODE_SUNSET },
    { ANDROID_CONTROL_SCENE_MODE_STEADYPHOTO, CAM_SCENE_MODE_ANTISHAKE },
    { ANDROID_CONTROL_SCENE_MODE_FIREWORKS , CAM_SCENE_MODE_FIREWORKS },
    { ANDROID_CONTROL_SCENE_MODE_SPORTS , CAM_SCENE_MODE_SPORTS },
    { ANDROID_CONTROL_SCENE_MODE_PARTY, CAM_SCENE_MODE_PARTY },
    { ANDROID_CONTROL_SCENE_MODE_CANDLELIGHT, CAM_SCENE_MODE_CANDLELIGHT },
    { ANDROID_CONTROL_SCENE_MODE_BARCODE, CAM_SCENE_MODE_BARCODE},
    { ANDROID_CONTROL_SCENE_MODE_HDR, CAM_SCENE_MODE_HDR}
};

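// Note: both CAM_FOCUS_MODE_OFF and CAM_FOCUS_MODE_FIXED map back to ANDROID_CONTROL_AF_MODE_OFF,
// hence the duplicate ANDROID_CONTROL_AF_MODE_OFF entry below.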
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_af_mode_t,
        cam_focus_mode_type> QCamera3HardwareInterface::FOCUS_MODES_MAP[] = {
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_OFF },
    { ANDROID_CONTROL_AF_MODE_OFF, CAM_FOCUS_MODE_FIXED },
    { ANDROID_CONTROL_AF_MODE_AUTO, CAM_FOCUS_MODE_AUTO },
    { ANDROID_CONTROL_AF_MODE_MACRO, CAM_FOCUS_MODE_MACRO },
    { ANDROID_CONTROL_AF_MODE_EDOF, CAM_FOCUS_MODE_EDOF },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE, CAM_FOCUS_MODE_CONTINOUS_PICTURE },
    { ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO, CAM_FOCUS_MODE_CONTINOUS_VIDEO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_color_correction_aberration_mode_t,
        cam_aberration_mode_t> QCamera3HardwareInterface::COLOR_ABERRATION_MAP[] = {
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
            CAM_COLOR_CORRECTION_ABERRATION_OFF },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
            CAM_COLOR_CORRECTION_ABERRATION_FAST },
    { ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY,
            CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY },
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_antibanding_mode_t,
        cam_antibanding_mode_type> QCamera3HardwareInterface::ANTIBANDING_MODES_MAP[] = {
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_OFF, CAM_ANTIBANDING_MODE_OFF },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_50HZ, CAM_ANTIBANDING_MODE_50HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_60HZ, CAM_ANTIBANDING_MODE_60HZ },
    { ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO, CAM_ANTIBANDING_MODE_AUTO }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_control_ae_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::AE_FLASH_MODE_MAP[] = {
    { ANDROID_CONTROL_AE_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON, CAM_FLASH_MODE_OFF },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH, CAM_FLASH_MODE_AUTO},
    { ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH, CAM_FLASH_MODE_ON },
    { ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE, CAM_FLASH_MODE_AUTO},
    { (camera_metadata_enum_android_control_ae_mode_t)
            NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH, CAM_FLASH_MODE_OFF }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_flash_mode_t,
        cam_flash_mode_t> QCamera3HardwareInterface::FLASH_MODES_MAP[] = {
    { ANDROID_FLASH_MODE_OFF, CAM_FLASH_MODE_OFF },
    { ANDROID_FLASH_MODE_SINGLE, CAM_FLASH_MODE_SINGLE },
    { ANDROID_FLASH_MODE_TORCH, CAM_FLASH_MODE_TORCH }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_statistics_face_detect_mode_t,
        cam_face_detect_mode_t> QCamera3HardwareInterface::FACEDETECT_MODES_MAP[] = {
    { ANDROID_STATISTICS_FACE_DETECT_MODE_OFF, CAM_FACE_DETECT_MODE_OFF },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE, CAM_FACE_DETECT_MODE_SIMPLE },
    { ANDROID_STATISTICS_FACE_DETECT_MODE_FULL, CAM_FACE_DETECT_MODE_FULL }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_info_focus_distance_calibration_t,
        cam_focus_calibration_t> QCamera3HardwareInterface::FOCUS_CALIBRATION_MAP[] = {
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_UNCALIBRATED,
            CAM_FOCUS_UNCALIBRATED },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_APPROXIMATE,
            CAM_FOCUS_APPROXIMATE },
    { ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION_CALIBRATED,
            CAM_FOCUS_CALIBRATED }
};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_lens_state_t,
        cam_af_lens_state_t> QCamera3HardwareInterface::LENS_STATE_MAP[] = {
    { ANDROID_LENS_STATE_STATIONARY, CAM_AF_LENS_STATE_STATIONARY},
    { ANDROID_LENS_STATE_MOVING, CAM_AF_LENS_STATE_MOVING}
};

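// Available JPEG thumbnail sizes, listed as (width, height) pairs; the leading (0, 0) entry
// indicates that thumbnail generation can be disabled.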
const int32_t available_thumbnail_sizes[] = {0, 0,
                                             176, 144,
                                             240, 144,
                                             256, 144,
                                             240, 160,
                                             256, 154,
                                             240, 240,
                                             320, 240};

const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_test_pattern_mode_t,
        cam_test_pattern_mode_t> QCamera3HardwareInterface::TEST_PATTERN_MAP[] = {
    { ANDROID_SENSOR_TEST_PATTERN_MODE_OFF, CAM_TEST_PATTERN_OFF },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_SOLID_COLOR, CAM_TEST_PATTERN_SOLID_COLOR },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS, CAM_TEST_PATTERN_COLOR_BARS },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_COLOR_BARS_FADE_TO_GRAY, CAM_TEST_PATTERN_COLOR_BARS_FADE_TO_GRAY },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_PN9, CAM_TEST_PATTERN_PN9 },
    { ANDROID_SENSOR_TEST_PATTERN_MODE_CUSTOM1, CAM_TEST_PATTERN_CUSTOM1},
};

/* Since there is no mapping for all the options, some Android enums are not listed.
 * The order in this list also matters: when mapping from HAL to Android, the table is
 * traversed from lower to higher index, so for HAL values that map to multiple Android
 * values, the first match found is the one selected.
 */
const QCamera3HardwareInterface::QCameraMap<
        camera_metadata_enum_android_sensor_reference_illuminant1_t,
        cam_illuminat_t> QCamera3HardwareInterface::REFERENCE_ILLUMINANT_MAP[] = {
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FLUORESCENT, CAM_AWB_WARM_FLO},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_COOL_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_STANDARD_A, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D55, CAM_AWB_NOON },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D65, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D75, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_D50, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_ISO_STUDIO_TUNGSTEN, CAM_AWB_CUSTOM_A},
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAYLIGHT, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_TUNGSTEN, CAM_AWB_A },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_FINE_WEATHER, CAM_AWB_D50 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_CLOUDY_WEATHER, CAM_AWB_D65 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_SHADE, CAM_AWB_D75 },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_DAY_WHITE_FLUORESCENT, CAM_AWB_CUSTOM_DAYLIGHT },
    { ANDROID_SENSOR_REFERENCE_ILLUMINANT1_WHITE_FLUORESCENT, CAM_AWB_COLD_FLO},
};

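// Supported high-frame-rate (HFR) video fps values and their corresponding HAL HFR modes.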
const QCamera3HardwareInterface::QCameraMap<
        int32_t, cam_hfr_mode_t> QCamera3HardwareInterface::HFR_MODE_MAP[] = {
    { 60, CAM_HFR_MODE_60FPS},
    { 90, CAM_HFR_MODE_90FPS},
    { 120, CAM_HFR_MODE_120FPS},
    { 150, CAM_HFR_MODE_150FPS},
    { 180, CAM_HFR_MODE_180FPS},
    { 210, CAM_HFR_MODE_210FPS},
    { 240, CAM_HFR_MODE_240FPS},
    { 480, CAM_HFR_MODE_480FPS},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_instant_aec_mode_t,
        cam_aec_convergence_type> QCamera3HardwareInterface::INSTANT_AEC_MODES_MAP[] = {
    { QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE, CAM_AEC_NORMAL_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_AGGRESSIVE_CONVERGENCE, CAM_AEC_AGGRESSIVE_CONVERGENCE},
    { QCAMERA3_INSTANT_AEC_FAST_CONVERGENCE, CAM_AEC_FAST_CONVERGENCE},
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_exposure_meter_mode_t,
        cam_auto_exposure_mode_type> QCamera3HardwareInterface::AEC_MODES_MAP[] = {
    { QCAMERA3_EXP_METER_MODE_FRAME_AVERAGE, CAM_AEC_MODE_FRAME_AVERAGE },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED, CAM_AEC_MODE_CENTER_WEIGHTED },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING, CAM_AEC_MODE_SPOT_METERING },
    { QCAMERA3_EXP_METER_MODE_SMART_METERING, CAM_AEC_MODE_SMART_METERING },
    { QCAMERA3_EXP_METER_MODE_USER_METERING, CAM_AEC_MODE_USER_METERING },
    { QCAMERA3_EXP_METER_MODE_SPOT_METERING_ADV, CAM_AEC_MODE_SPOT_METERING_ADV },
    { QCAMERA3_EXP_METER_MODE_CENTER_WEIGHTED_ADV, CAM_AEC_MODE_CENTER_WEIGHTED_ADV },
};

const QCamera3HardwareInterface::QCameraMap<
        qcamera3_ext_iso_mode_t,
        cam_iso_mode_type> QCamera3HardwareInterface::ISO_MODES_MAP[] = {
    { QCAMERA3_ISO_MODE_AUTO, CAM_ISO_MODE_AUTO },
    { QCAMERA3_ISO_MODE_DEBLUR, CAM_ISO_MODE_DEBLUR },
    { QCAMERA3_ISO_MODE_100, CAM_ISO_MODE_100 },
    { QCAMERA3_ISO_MODE_200, CAM_ISO_MODE_200 },
    { QCAMERA3_ISO_MODE_400, CAM_ISO_MODE_400 },
    { QCAMERA3_ISO_MODE_800, CAM_ISO_MODE_800 },
    { QCAMERA3_ISO_MODE_1600, CAM_ISO_MODE_1600 },
    { QCAMERA3_ISO_MODE_3200, CAM_ISO_MODE_3200 },
};

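// camera3_device_ops vtable exposed to the camera framework. register_stream_buffers and
// get_metadata_vendor_tag_ops are deprecated for HAL3.2+ devices and are therefore left NULL.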
camera3_device_ops_t QCamera3HardwareInterface::mCameraOps = {
    .initialize = QCamera3HardwareInterface::initialize,
    .configure_streams = QCamera3HardwareInterface::configure_streams,
    .register_stream_buffers = NULL,
    .construct_default_request_settings = QCamera3HardwareInterface::construct_default_request_settings,
    .process_capture_request = QCamera3HardwareInterface::process_capture_request,
    .get_metadata_vendor_tag_ops = NULL,
    .dump = QCamera3HardwareInterface::dump,
    .flush = QCamera3HardwareInterface::flush,
    .reserved = {0},
};

// initialise to some default value
uint32_t QCamera3HardwareInterface::sessionId[] = {0xDEADBEEF, 0xDEADBEEF, 0xDEADBEEF};

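// Logs an Easel timing event, tagged with a CLOCK_BOOTTIME timestamp in milliseconds.
// Only active when Easel profiling is enabled via gEaselProfilingEnabled.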
static inline void logEaselEvent(const char *tag, const char *event) {
    if (CC_UNLIKELY(gEaselProfilingEnabled)) {
        struct timespec ts = {};
        static int64_t kMsPerSec = 1000;
        static int64_t kNsPerMs = 1000000;
        status_t res = clock_gettime(CLOCK_BOOTTIME, &ts);
        if (res != OK) {
            ALOGE("[%s] Failed to get boot time for <%s>.", tag, event);
        } else {
            int64_t now = static_cast<int64_t>(ts.tv_sec) * kMsPerSec + ts.tv_nsec / kNsPerMs;
            ALOGI("[%s] %s at %" PRId64 " ms", tag, event, now);
        }
    }
}

/*===========================================================================
 * FUNCTION   : QCamera3HardwareInterface
 *
 * DESCRIPTION: constructor of QCamera3HardwareInterface
 *
 * PARAMETERS :
 *   @cameraId  : camera ID
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::QCamera3HardwareInterface(uint32_t cameraId,
        const camera_module_callbacks_t *callbacks)
    : mCameraId(cameraId),
      mCameraHandle(NULL),
      mCameraInitialized(false),
      mCallbackOps(NULL),
      mMetadataChannel(NULL),
      mPictureChannel(NULL),
      mRawChannel(NULL),
      mSupportChannel(NULL),
      mAnalysisChannel(NULL),
      mRawDumpChannel(NULL),
      mHdrPlusRawSrcChannel(NULL),
      mDummyBatchChannel(NULL),
      mDepthChannel(NULL),
      mDepthCloudMode(CAM_PD_DATA_SKIP),
      mPerfLockMgr(),
      mChannelHandle(0),
      mFirstConfiguration(true),
      mFlush(false),
      mFlushPerf(false),
      mParamHeap(NULL),
      mParameters(NULL),
      mPrevParameters(NULL),
      m_bIsVideo(false),
      m_bIs4KVideo(false),
      m_bEisSupportedSize(false),
      m_bEisEnable(false),
      m_bEis3PropertyEnabled(false),
      m_bAVTimerEnabled(false),
      m_MobicatMask(0),
      mShutterDispatcher(this),
      mOutputBufferDispatcher(this),
      mMinProcessedFrameDuration(0),
      mMinJpegFrameDuration(0),
      mMinRawFrameDuration(0),
      mExpectedFrameDuration(0),
      mExpectedInflightDuration(0),
      mMetaFrameCount(0U),
      mUpdateDebugLevel(false),
      mCallbacks(callbacks),
      mCaptureIntent(0),
      mCacMode(0),
      /* DevCamDebug metadata internal m control*/
      mDevCamDebugMetaEnable(0),
      /* DevCamDebug metadata end */
      mBatchSize(0),
      mToBeQueuedVidBufs(0),
      mHFRVideoFps(DEFAULT_VIDEO_FPS),
      mOpMode(CAMERA3_STREAM_CONFIGURATION_NORMAL_MODE),
      mStreamConfig(false),
      mCommon(),
      mFirstFrameNumberInBatch(0),
      mNeedSensorRestart(false),
      mPreviewStarted(false),
      mMinInFlightRequests(MIN_INFLIGHT_REQUESTS),
      mMaxInFlightRequests(MAX_INFLIGHT_REQUESTS),
      mPDSupported(false),
      mPDIndex(0),
      mInstantAEC(false),
      mResetInstantAEC(false),
      mInstantAECSettledFrameNumber(0),
      mAecSkipDisplayFrameBound(0),
      mInstantAecFrameIdxCount(0),
      mLastRequestedLensShadingMapMode(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF),
      mCurrFeatureState(0),
      mLdafCalibExist(false),
      mLastCustIntentFrmNum(-1),
      mFirstMetadataCallback(true),
      mState(CLOSED),
      mIsDeviceLinked(false),
      mIsMainCamera(true),
      mLinkedCameraId(0),
      m_pDualCamCmdHeap(NULL),
      m_pDualCamCmdPtr(NULL),
      mHdrPlusModeEnabled(false),
      mZslEnabled(false),
      mEaselMipiStarted(false),
      mIsApInputUsedForHdrPlus(false),
      mFirstPreviewIntentSeen(false),
      m_bSensorHDREnabled(false),
      mAfTrigger(),
      mSceneDistance(-1)
{
    getLogLevel();
    mCommon.init(gCamCapability[cameraId]);
    mCameraDevice.common.tag = HARDWARE_DEVICE_TAG;
#ifndef USE_HAL_3_3
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_4;
#else
    mCameraDevice.common.version = CAMERA_DEVICE_API_VERSION_3_3;
#endif
    mCameraDevice.common.close = close_camera_device;
    mCameraDevice.ops = &mCameraOps;
    mCameraDevice.priv = this;
    gCamCapability[cameraId]->version = CAM_HAL_V3;
    // TODO: hardcoded for now until mctl adds support for min_num_pp_bufs
    // TBD - check whether this hardcoding is still needed, e.g. by verifying that mctl fills this to 3
    gCamCapability[cameraId]->min_num_pp_bufs = 3;

    PTHREAD_COND_INIT(&mBuffersCond);

    PTHREAD_COND_INIT(&mRequestCond);
    mPendingLiveRequest = 0;
    mCurrentRequestId = -1;
    pthread_mutex_init(&mMutex, NULL);

    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        mDefaultMetadata[i] = NULL;

    // Getting system props of different kinds
    char prop[PROPERTY_VALUE_MAX];
    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.raw.dump", prop, "0");
    mEnableRawDump = atoi(prop);
    property_get("persist.camera.hal3.force.hdr", prop, "0");
    mForceHdrSnapshot = atoi(prop);

    if (mEnableRawDump)
        LOGD("Raw dump from Camera HAL enabled");

    memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
    memset(mLdafCalib, 0, sizeof(mLdafCalib));

    memset(mEaselFwVersion, 0, sizeof(mEaselFwVersion));
    mEaselFwUpdated = false;

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.preview", prop, "0");
    m_bTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.swtnr.preview", prop, "1");
    m_bSwTnrPreview = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.tnr.video", prop, "1");
    m_bTnrVideo = (uint8_t)atoi(prop);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.avtimer.debug", prop, "0");
    m_debug_avtimer = (uint8_t)atoi(prop);
    LOGI("AV timer enabled: %d", m_debug_avtimer);

    memset(prop, 0, sizeof(prop));
    property_get("persist.camera.cacmode.disable", prop, "0");
    m_cacModeDisabled = (uint8_t)atoi(prop);

    m_bForceInfinityAf = property_get_bool("persist.camera.af.infinity", 0);
    m_MobicatMask = (uint8_t)property_get_int32("persist.camera.mobicat", 0);

    //Load and read GPU library.
    lib_surface_utils = NULL;
    LINK_get_surface_pixel_alignment = NULL;
    mSurfaceStridePadding = CAM_PAD_TO_64;
#ifdef CHECK_GPU_PIXEL_ALIGNMENT
    lib_surface_utils = dlopen("libadreno_utils.so", RTLD_NOW);
    if (lib_surface_utils) {
        *(void **)&LINK_get_surface_pixel_alignment =
                dlsym(lib_surface_utils, "get_gpu_pixel_alignment");
        if (LINK_get_surface_pixel_alignment) {
            mSurfaceStridePadding = LINK_get_surface_pixel_alignment();
        }
        dlclose(lib_surface_utils);
    }
#endif
    mPDIndex = getPDStatIndex(gCamCapability[cameraId]);
    mPDSupported = (0 <= mPDIndex) ? true : false;

    m60HzZone = is60HzZone();
}

/*===========================================================================
 * FUNCTION   : ~QCamera3HardwareInterface
 *
 * DESCRIPTION: destructor of QCamera3HardwareInterface
 *
 * PARAMETERS : none
 *
 * RETURN     : none
 *==========================================================================*/
QCamera3HardwareInterface::~QCamera3HardwareInterface()
{
    LOGD("E");

    int32_t rc = 0;

    // Disable power hint and enable the perf lock for close camera
    mPerfLockMgr.releasePerfLock(PERF_LOCK_POWERHINT_ENCODE);
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_CLOSE_CAMERA);

    // Close HDR+ client first before destroying HAL.
    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        finishHdrPlusClientOpeningLocked(l);
        if (gHdrPlusClient != nullptr) {
            // Disable HDR+ mode.
            disableHdrPlusModeLocked();
            // Disconnect Easel if it's connected.
            gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
            gHdrPlusClient = nullptr;
        }
    }

    // unlink of dualcam during close camera
    if (mIsDeviceLinked) {
        cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
                &m_pDualCamCmdPtr->bundle_info;
        m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
        m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
        pthread_mutex_lock(&gCamLock);

        if (mIsMainCamera == 1) {
            m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            // related session id should be session id of linked session
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        } else {
            m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
            m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
            m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
            m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
        }
        m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
        pthread_mutex_unlock(&gCamLock);

        rc = mCameraHandle->ops->set_dual_cam_cmd(
                mCameraHandle->camera_handle);
        if (rc < 0) {
            LOGE("Dualcam: Unlink failed, but still proceed to close");
        }
    }

    /* We need to stop all streams before deleting any stream */
    if (mRawDumpChannel) {
        mRawDumpChannel->stop();
    }

    if (mHdrPlusRawSrcChannel) {
        mHdrPlusRawSrcChannel->stop();
    }

    // NOTE: 'camera3_stream_t *' objects are already freed at
    //        this stage by the framework
    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel) {
            channel->stop();
        }
    }
    if (mSupportChannel)
        mSupportChannel->stop();

    if (mAnalysisChannel) {
        mAnalysisChannel->stop();
    }
    if (mMetadataChannel) {
        mMetadataChannel->stop();
    }
    if (mChannelHandle) {
        stopChannelLocked(/*stop_immediately*/false);
    }

    for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
            it != mStreamInfo.end(); it++) {
        QCamera3ProcessingChannel *channel = (*it)->channel;
        if (channel)
            delete channel;
        free (*it);
    }
    if (mSupportChannel) {
        delete mSupportChannel;
        mSupportChannel = NULL;
    }

    if (mAnalysisChannel) {
        delete mAnalysisChannel;
        mAnalysisChannel = NULL;
    }
    if (mRawDumpChannel) {
        delete mRawDumpChannel;
        mRawDumpChannel = NULL;
    }
    if (mHdrPlusRawSrcChannel) {
        delete mHdrPlusRawSrcChannel;
        mHdrPlusRawSrcChannel = NULL;
    }
    if (mDummyBatchChannel) {
        delete mDummyBatchChannel;
        mDummyBatchChannel = NULL;
    }

    mPictureChannel = NULL;
    mDepthChannel = NULL;

    if (mMetadataChannel) {
        delete mMetadataChannel;
        mMetadataChannel = NULL;
    }

    /* Clean up all channels */
    if (mCameraInitialized) {
        if(!mFirstConfiguration){
            //send the last unconfigure
            cam_stream_size_info_t stream_config_info;
            memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
            stream_config_info.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
            stream_config_info.buffer_info.max_buffers =
                    m_bIs4KVideo ? 0 :
                    m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
            clear_metadata_buffer(mParameters);
            ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_INFO,
                    stream_config_info);
            int rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
            if (rc < 0) {
                LOGE("set_parms failed for unconfigure");
            }
        }
        deinitParameters();
    }

    if (mChannelHandle) {
        mCameraHandle->ops->delete_channel(mCameraHandle->camera_handle,
                mChannelHandle);
        LOGH("deleting channel %d", mChannelHandle);
        mChannelHandle = 0;
    }

    if (mState != CLOSED)
        closeCamera();

    for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
        req.mPendingBufferList.clear();
    }
    mPendingBuffersMap.mPendingBuffersInRequest.clear();
    for (pendingRequestIterator i = mPendingRequestsList.begin();
            i != mPendingRequestsList.end();) {
        i = erasePendingRequest(i);
    }
    for (size_t i = 0; i < CAMERA3_TEMPLATE_COUNT; i++)
        if (mDefaultMetadata[i])
            free_camera_metadata(mDefaultMetadata[i]);

    mPerfLockMgr.releasePerfLock(PERF_LOCK_CLOSE_CAMERA);

    pthread_cond_destroy(&mRequestCond);

    pthread_cond_destroy(&mBuffersCond);

    pthread_mutex_destroy(&mMutex);
    LOGD("X");
}

/*===========================================================================
 * FUNCTION   : erasePendingRequest
 *
 * DESCRIPTION: function to erase a desired pending request after freeing any
 *              allocated memory
 *
 * PARAMETERS :
 *   @i       : iterator pointing to pending request to be erased
 *
 * RETURN     : iterator pointing to the next request
 *==========================================================================*/
QCamera3HardwareInterface::pendingRequestIterator
        QCamera3HardwareInterface::erasePendingRequest (pendingRequestIterator i)
{
    if (i->input_buffer != NULL) {
        free(i->input_buffer);
        i->input_buffer = NULL;
    }
    if (i->settings != NULL)
        free_camera_metadata((camera_metadata_t*)i->settings);

    mExpectedInflightDuration -= i->expectedFrameDuration;
    if (mExpectedInflightDuration < 0) {
        LOGE("Negative expected in-flight duration!");
        mExpectedInflightDuration = 0;
    }

    return mPendingRequestsList.erase(i);
}

/*===========================================================================
 * FUNCTION   : camEvtHandle
 *
 * DESCRIPTION: Function registered to mm-camera-interface to handle events
 *
 * PARAMETERS :
 *   @camera_handle : interface layer camera handle
 *   @evt           : ptr to event
 *   @user_data     : user data ptr
 *
 * RETURN     : none
 *==========================================================================*/
void QCamera3HardwareInterface::camEvtHandle(uint32_t /*camera_handle*/,
        mm_camera_event_t *evt,
        void *user_data)
{
    QCamera3HardwareInterface *obj = (QCamera3HardwareInterface *)user_data;
    if (obj && evt) {
        switch(evt->server_event_type) {
            case CAM_EVENT_TYPE_DAEMON_DIED:
                pthread_mutex_lock(&obj->mMutex);
                obj->mState = ERROR;
                pthread_mutex_unlock(&obj->mMutex);
                LOGE("Fatal, camera daemon died");
                break;

            case CAM_EVENT_TYPE_DAEMON_PULL_REQ:
                LOGD("HAL got request pull from Daemon");
                pthread_mutex_lock(&obj->mMutex);
                obj->mWokenUpByDaemon = true;
                obj->unblockRequestIfNecessary();
                pthread_mutex_unlock(&obj->mMutex);
                break;

            default:
                LOGW("Warning: Unhandled event %d",
                        evt->server_event_type);
                break;
        }
    } else {
        LOGE("NULL user_data/evt");
    }
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS :
 *   @hw_device  : double ptr for camera device struct
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera(struct hw_device_t **hw_device)
{
    int rc = 0;
    if (mState != CLOSED) {
        *hw_device = NULL;
        return PERMISSION_DENIED;
    }

    logEaselEvent("EASEL_STARTUP_LATENCY", "Camera Open");
    mPerfLockMgr.acquirePerfLock(PERF_LOCK_OPEN_CAMERA);
    LOGI("[KPI Perf]: E PROFILE_OPEN_CAMERA camera id %d",
            mCameraId);

    if (mCameraHandle) {
        LOGE("Failure: Camera already opened");
        return ALREADY_EXISTS;
    }

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
            logEaselEvent("EASEL_STARTUP_LATENCY", "Resume");
            rc = gEaselManagerClient->resume(this);
            if (rc != 0) {
                ALOGE("%s: Resuming Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
                return rc;
            }
            mEaselFwUpdated = false;
        }
    }

    rc = openCamera();
    if (rc == 0) {
        *hw_device = &mCameraDevice.common;
    } else {
        *hw_device = NULL;

        // Suspend Easel because opening camera failed.
        {
            std::unique_lock<std::mutex> l(gHdrPlusClientLock);
            if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice()) {
                status_t suspendErr = gEaselManagerClient->suspend();
                if (suspendErr != 0) {
                    ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__,
                            strerror(-suspendErr), suspendErr);
                }
            }
        }
    }

    LOGI("[KPI Perf]: X PROFILE_OPEN_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    if (rc == NO_ERROR) {
        mState = OPENED;
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : openCamera
 *
 * DESCRIPTION: open camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::openCamera()
{
    int rc = 0;
    char value[PROPERTY_VALUE_MAX];

    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_OPENCAMERA);

    rc = QCameraFlash::getInstance().reserveFlashForCamera(mCameraId);
    if (rc < 0) {
        LOGE("Failed to reserve flash for camera id: %d",
                mCameraId);
        return UNKNOWN_ERROR;
    }

    rc = camera_open((uint8_t)mCameraId, &mCameraHandle);
    if (rc) {
        LOGE("camera_open failed. rc = %d, mCameraHandle = %p", rc, mCameraHandle);
        return rc;
    }

    if (!mCameraHandle) {
        LOGE("camera_open failed. mCameraHandle = %p", mCameraHandle);
        return -ENODEV;
    }

    rc = mCameraHandle->ops->register_event_notify(mCameraHandle->camera_handle,
            camEvtHandle, (void *)this);

    if (rc < 0) {
        LOGE("Error, failed to register event callback");
        /* Not closing camera here since it is already handled in destructor */
        return FAILED_TRANSACTION;
    }

    mExifParams.debug_params =
            (mm_jpeg_debug_exif_params_t *) malloc (sizeof(mm_jpeg_debug_exif_params_t));
    if (mExifParams.debug_params) {
        memset(mExifParams.debug_params, 0, sizeof(mm_jpeg_debug_exif_params_t));
    } else {
        LOGE("Out of Memory. Allocation failed for 3A debug exif params");
        return NO_MEMORY;
    }
    mFirstConfiguration = true;

    // Notify display HAL that a camera session is active.
    // Avoid making this call during bootup: camera service may open/close cameras while it
    // initializes, and display service internally waits for camera service to finish
    // initializing when this display API is called, which can lead to a deadlock. Boot-time
    // camera open/close calls are made only to fetch capabilities, so this display bandwidth
    // optimization is not needed there.
    // Use the "service.bootanim.exit" property to determine boot status.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (gNumCameraSessions++ == 0) {
            setCameraLaunchStatus(true);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    //fill the session id needed while linking dual cam
    pthread_mutex_lock(&gCamLock);
    rc = mCameraHandle->ops->get_session_id(mCameraHandle->camera_handle,
            &sessionId[mCameraId]);
    pthread_mutex_unlock(&gCamLock);

    if (rc < 0) {
        LOGE("Error, failed to get session id");
        return UNKNOWN_ERROR;
    } else {
        //Allocate related cam sync buffer
        //this is needed for the payload that goes along with bundling cmd for related
        //camera use cases
        m_pDualCamCmdHeap = new QCamera3HeapMemory(1);
        rc = m_pDualCamCmdHeap->allocate(sizeof(cam_dual_camera_cmd_info_t));
        if(rc != OK) {
            rc = NO_MEMORY;
            LOGE("Dualcam: Failed to allocate Related cam sync Heap memory");
            return NO_MEMORY;
        }

        //Map memory for related cam sync buffer
        rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
                CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF,
                m_pDualCamCmdHeap->getFd(0),
                sizeof(cam_dual_camera_cmd_info_t),
                m_pDualCamCmdHeap->getPtr(0));
        if(rc < 0) {
            LOGE("Dualcam: failed to map Related cam sync buffer");
            rc = FAILED_TRANSACTION;
            return NO_MEMORY;
        }
        m_pDualCamCmdPtr =
                (cam_dual_camera_cmd_info_t*) DATA_PTR(m_pDualCamCmdHeap,0);
    }

    LOGH("mCameraId=%d",mCameraId);

    return NO_ERROR;
}

/*===========================================================================
 * FUNCTION   : closeCamera
 *
 * DESCRIPTION: close camera
 *
 * PARAMETERS : none
 *
 * RETURN     : int32_t type of status
 *              NO_ERROR  -- success
 *              none-zero failure code
 *==========================================================================*/
int QCamera3HardwareInterface::closeCamera()
{
    KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CLOSECAMERA);
    int rc = NO_ERROR;
    char value[PROPERTY_VALUE_MAX];

    LOGI("[KPI Perf]: E PROFILE_CLOSE_CAMERA camera id %d",
            mCameraId);

    // unmap memory for related cam sync buffer
    mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
            CAM_MAPPING_BUF_TYPE_DUAL_CAM_CMD_BUF);
    if (NULL != m_pDualCamCmdHeap) {
        m_pDualCamCmdHeap->deallocate();
        delete m_pDualCamCmdHeap;
        m_pDualCamCmdHeap = NULL;
        m_pDualCamCmdPtr = NULL;
    }

    rc = mCameraHandle->ops->close_camera(mCameraHandle->camera_handle);
    mCameraHandle = NULL;

    //reset session id to some invalid id
    pthread_mutex_lock(&gCamLock);
    sessionId[mCameraId] = 0xDEADBEEF;
    pthread_mutex_unlock(&gCamLock);

    //Notify display HAL that there is no active camera session
    //but avoid calling the same during bootup. Refer to openCamera
    //for more details.
    property_get("service.bootanim.exit", value, "0");
    if (atoi(value) == 1) {
        pthread_mutex_lock(&gCamLock);
        if (--gNumCameraSessions == 0) {
            setCameraLaunchStatus(false);
        }
        pthread_mutex_unlock(&gCamLock);
    }

    if (mExifParams.debug_params) {
        free(mExifParams.debug_params);
        mExifParams.debug_params = NULL;
    }
    if (QCameraFlash::getInstance().releaseFlashFromCamera(mCameraId) != 0) {
        LOGW("Failed to release flash for camera id: %d",
                mCameraId);
    }
    mState = CLOSED;
    LOGI("[KPI Perf]: X PROFILE_CLOSE_CAMERA camera id %d, rc: %d",
            mCameraId, rc);

    {
        std::unique_lock<std::mutex> l(gHdrPlusClientLock);
        if (EaselManagerClientOpened) {
            rc = gEaselManagerClient->suspend();
            if (rc != 0) {
                ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
            }
        }
    }

    return rc;
}

/*===========================================================================
 * FUNCTION   : initialize
 *
 * DESCRIPTION: Initialize frameworks callback functions
 *
 * PARAMETERS :
 *   @callback_ops : callback function to frameworks
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::initialize(
        const struct camera3_callback_ops *callback_ops)
{
    ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_INIT);
    int rc;

    LOGI("E :mCameraId = %d mState = %d", mCameraId, mState);
    pthread_mutex_lock(&mMutex);

    // Validate current state
    switch (mState) {
        case OPENED:
            /* valid state */
            break;
        default:
            LOGE("Invalid state %d", mState);
            rc = -ENODEV;
            goto err1;
    }

    rc = initParameters();
    if (rc < 0) {
        LOGE("initParamters failed %d", rc);
        goto err1;
    }
    mCallbackOps = callback_ops;

    mChannelHandle = mCameraHandle->ops->add_channel(
            mCameraHandle->camera_handle, NULL, NULL, this);
    if (mChannelHandle == 0) {
        LOGE("add_channel failed");
        rc = -ENOMEM;
        pthread_mutex_unlock(&mMutex);
        return rc;
    }

    pthread_mutex_unlock(&mMutex);
    mCameraInitialized = true;
    mState = INITIALIZED;
    LOGI("X");
    return 0;

err1:
    pthread_mutex_unlock(&mMutex);
    return rc;
}

/*===========================================================================
 * FUNCTION   : validateStreamDimensions
 *
 * DESCRIPTION: Check if the configuration requested are those advertised
 *
 * PARAMETERS :
 *   @stream_list : streams to be configured
 *
 * RETURN     :
 *
 *==========================================================================*/
int QCamera3HardwareInterface::validateStreamDimensions(
        camera3_stream_configuration_t *streamList)
{
    int rc = NO_ERROR;
    size_t count = 0;
    uint32_t depthWidth = 0;
    uint32_t depthHeight = 0;
    if (mPDSupported) {
        depthWidth = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].width;
        depthHeight = gCamCapability[mCameraId]->raw_meta_dim[mPDIndex].height;
    }

    camera3_stream_t *inputStream = NULL;
    /*
    * Loop through all streams to find input stream if it exists*
    */
    for (size_t i = 0; i< streamList->num_streams; i++) {
        if (streamList->streams[i]->stream_type == CAMERA3_STREAM_INPUT) {
            if (inputStream != NULL) {
                LOGE("Error, Multiple input streams requested");
                return -EINVAL;
            }
            inputStream = streamList->streams[i];
        }
    }
    /*
    * Loop through all streams requested in configuration
    * Check if unsupported sizes have been requested on any of them
    */
    for (size_t j = 0; j < streamList->num_streams; j++) {
        bool sizeFound = false;
        camera3_stream_t *newStream = streamList->streams[j];

        uint32_t rotatedHeight = newStream->height;
        uint32_t rotatedWidth = newStream->width;
        if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
                (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
            rotatedHeight = newStream->width;
            rotatedWidth = newStream->height;
        }

        /*
        * Sizes are different for each type of stream format check against
        * appropriate table.
        */
        switch (newStream->format) {
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
        case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
        case HAL_PIXEL_FORMAT_RAW10:
            if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
                    (ANDROID_SCALER_AVAILABLE_FORMATS_RAW16 == newStream->format) &&
                    mPDSupported) {
                if ((depthWidth == newStream->width) &&
                        (depthHeight == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
            for (size_t i = 0; i < count; i++) {
                if ((gCamCapability[mCameraId]->raw_dim[i].width == (int32_t)rotatedWidth) &&
                        (gCamCapability[mCameraId]->raw_dim[i].height == (int32_t)rotatedHeight)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_BLOB:
            if ((newStream->data_space == HAL_DATASPACE_DEPTH) &&
                    mPDSupported) {
                //As per spec. depth cloud should be sample count / 16
                uint32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
                if ((depthSamplesCount == newStream->width) &&
                        (1 == newStream->height)) {
                    sizeFound = true;
                }
                break;
            }
            count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
            /* Verify set size against generated sizes table */
            for (size_t i = 0; i < count; i++) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
                    sizeFound = true;
                    break;
                }
            }
            break;
        case HAL_PIXEL_FORMAT_YCbCr_420_888:
        case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
        default:
            if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
                    || newStream->stream_type == CAMERA3_STREAM_INPUT
                    || IS_USAGE_ZSL(newStream->usage)) {
                if (((int32_t)rotatedWidth ==
                        gCamCapability[mCameraId]->active_array_size.width) &&
                        ((int32_t)rotatedHeight ==
                        gCamCapability[mCameraId]->active_array_size.height)) {
                    sizeFound = true;
                    break;
                }
1309                /* We could potentially break here to enforce that a ZSL stream
1310                 * set by the framework is always the full active array size,
1311                 * but it is not clear from the spec whether the framework will
1312                 * always follow that. We also have logic to override to the full
1313                 * array size, so keep the logic lenient for now.
1314 */
1315 }
1316 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt,
1317 MAX_SIZES_CNT);
1318 for (size_t i = 0; i < count; i++) {
1319 if (((int32_t)rotatedWidth ==
1320 gCamCapability[mCameraId]->picture_sizes_tbl[i].width) &&
1321 ((int32_t)rotatedHeight ==
1322 gCamCapability[mCameraId]->picture_sizes_tbl[i].height)) {
1323 sizeFound = true;
1324 break;
1325 }
1326 }
1327 break;
1328 } /* End of switch(newStream->format) */
1329
1330        /* We error out even if a single stream has an unsupported size set */
1331 if (!sizeFound) {
1332 LOGE("Error: Unsupported size: %d x %d type: %d array size: %d x %d",
1333 rotatedWidth, rotatedHeight, newStream->format,
1334 gCamCapability[mCameraId]->active_array_size.width,
1335 gCamCapability[mCameraId]->active_array_size.height);
1336 rc = -EINVAL;
1337 break;
1338 }
1339 } /* End of for each stream */
1340 return rc;
1341}
1342
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001343/*===========================================================================
1344 * FUNCTION : validateUsageFlags
1345 *
1346 * DESCRIPTION: Check if the configuration usage flags map to the same internal format.
1347 *
1348 * PARAMETERS :
1349 * @stream_list : streams to be configured
1350 *
1351 * RETURN :
1352 * NO_ERROR if the usage flags are supported
1353 * error code if usage flags are not supported
1354 *
1355 *==========================================================================*/
1356int QCamera3HardwareInterface::validateUsageFlags(
1357 const camera3_stream_configuration_t* streamList)
1358{
1359 for (size_t j = 0; j < streamList->num_streams; j++) {
1360 const camera3_stream_t *newStream = streamList->streams[j];
1361
1362 if (newStream->format != HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
1363 (newStream->stream_type != CAMERA3_STREAM_OUTPUT &&
1364 newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL)) {
1365 continue;
1366 }
1367
Jason Leec4cf5032017-05-24 18:31:41 -07001368 // Here we only care whether it's EIS3 or not
1369 char is_type_value[PROPERTY_VALUE_MAX];
1370 property_get("persist.camera.is_type", is_type_value, "4");
1371 cam_is_type_t isType = atoi(is_type_value) == IS_TYPE_EIS_3_0 ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
1372 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
1373 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
1374 isType = IS_TYPE_NONE;
1375
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001376 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1377 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1378 bool isZSL = IS_USAGE_ZSL(newStream->usage);
1379 bool forcePreviewUBWC = true;
1380 if (isVideo && !QCameraCommon::isVideoUBWCEnabled()) {
1381 forcePreviewUBWC = false;
1382 }
1383 cam_format_t videoFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001384 CAM_STREAM_TYPE_VIDEO, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001385 cam_format_t previewFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001386 CAM_STREAM_TYPE_PREVIEW, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001387 cam_format_t zslFormat = QCamera3Channel::getStreamDefaultFormat(
Jason Leec4cf5032017-05-24 18:31:41 -07001388 CAM_STREAM_TYPE_SNAPSHOT, newStream->width, newStream->height, forcePreviewUBWC, isType);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001389
1390 // Color space for this camera device is guaranteed to be ITU_R_601_FR.
1391 // So color spaces will always match.
1392
1393 // Check whether underlying formats of shared streams match.
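        // Illustrative (hypothetical) failure case: a single IMPLEMENTATION_DEFINED
        // output stream whose usage flags mark it as both video and preview is
        // rejected here whenever the default video format (e.g. UBWC) differs from
        // the default preview format for that size.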
1394 if (isVideo && isPreview && videoFormat != previewFormat) {
1395 LOGE("Combined video and preview usage flag is not supported");
1396 return -EINVAL;
1397 }
1398 if (isPreview && isZSL && previewFormat != zslFormat) {
1399 LOGE("Combined preview and zsl usage flag is not supported");
1400 return -EINVAL;
1401 }
1402 if (isVideo && isZSL && videoFormat != zslFormat) {
1403 LOGE("Combined video and zsl usage flag is not supported");
1404 return -EINVAL;
1405 }
1406 }
1407 return NO_ERROR;
1408}
1409
1410/*===========================================================================
1411 * FUNCTION : validateUsageFlagsForEis
1412 *
1413 * DESCRIPTION: Check if the configuration usage flags conflict with EIS
1414 *
1415 * PARAMETERS :
1416 * @stream_list : streams to be configured
1417 *
1418 * RETURN :
1419 * NO_ERROR if the usage flags are supported
1420 * error code if usage flags are not supported
1421 *
1422 *==========================================================================*/
1423int QCamera3HardwareInterface::validateUsageFlagsForEis(
1424 const camera3_stream_configuration_t* streamList)
1425{
1426 for (size_t j = 0; j < streamList->num_streams; j++) {
1427 const camera3_stream_t *newStream = streamList->streams[j];
1428
1429 bool isVideo = IS_USAGE_VIDEO(newStream->usage);
1430 bool isPreview = IS_USAGE_PREVIEW(newStream->usage);
1431
1432        // Because EIS is "hard-coded" for certain use cases, and the current
1433        // implementation doesn't support sharing preview and video on the same
1434        // stream, return failure if EIS is forced on.
1435 if (isPreview && isVideo && m_bEisEnable && m_bEisSupportedSize) {
1436 LOGE("Combined video and preview usage flag is not supported due to EIS");
1437 return -EINVAL;
1438 }
1439 }
1440 return NO_ERROR;
1441}
1442
Thierry Strudel3d639192016-09-09 11:52:26 -07001443/*==============================================================================
1444 * FUNCTION : isSupportChannelNeeded
1445 *
1446 * DESCRIPTION: Simple heuristic to determine if a support channel is needed
1447 *
1448 * PARAMETERS :
1449 * @stream_list : streams to be configured
1450 * @stream_config_info : the config info for streams to be configured
1451 *
1452 * RETURN : Boolean true/false decision
1453 *
1454 *==========================================================================*/
1455bool QCamera3HardwareInterface::isSupportChannelNeeded(
1456 camera3_stream_configuration_t *streamList,
1457 cam_stream_size_info_t stream_config_info)
1458{
1459 uint32_t i;
1460 bool pprocRequested = false;
1461    /* Check for conditions where the PProc pipeline does not have any streams */
1462 for (i = 0; i < stream_config_info.num_streams; i++) {
1463 if (stream_config_info.type[i] != CAM_STREAM_TYPE_ANALYSIS &&
1464 stream_config_info.postprocess_mask[i] != CAM_QCOM_FEATURE_NONE) {
1465 pprocRequested = true;
1466 break;
1467 }
1468 }
1469
1470 if (pprocRequested == false )
1471 return true;
1472
1473    /* Dummy stream is needed if only raw or JPEG streams are present */
1474 for (i = 0; i < streamList->num_streams; i++) {
1475 switch(streamList->streams[i]->format) {
1476 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
1477 case HAL_PIXEL_FORMAT_RAW10:
1478 case HAL_PIXEL_FORMAT_RAW16:
1479 case HAL_PIXEL_FORMAT_BLOB:
1480 break;
1481 default:
1482 return false;
1483 }
1484 }
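    // Reaching this point means only raw and/or JPEG (blob) streams are
    // configured, so a support channel is needed.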
1485 return true;
1486}
1487
1488/*==============================================================================
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001489 * FUNCTION   : getSensorModeInfo
Thierry Strudel3d639192016-09-09 11:52:26 -07001490 *
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001491 * DESCRIPTION: Get sensor mode information based on the current stream configuration
Thierry Strudel3d639192016-09-09 11:52:26 -07001492 *
1493 * PARAMETERS :
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001494 *   @sensorModeInfo : sensor mode information (output)
Thierry Strudel3d639192016-09-09 11:52:26 -07001495 *
1496 * RETURN : int32_t type of status
1497 * NO_ERROR -- success
1498 *              non-zero failure code
1499 *
1500 *==========================================================================*/
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001501int32_t QCamera3HardwareInterface::getSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
Thierry Strudel3d639192016-09-09 11:52:26 -07001502{
1503 int32_t rc = NO_ERROR;
1504
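    // Find the largest width and height across all configured streams; this
    // maximum dimension is handed to the backend before querying the sensor
    // mode info for the current configuration.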
1505 cam_dimension_t max_dim = {0, 0};
1506 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
1507 if (mStreamConfigInfo.stream_sizes[i].width > max_dim.width)
1508 max_dim.width = mStreamConfigInfo.stream_sizes[i].width;
1509 if (mStreamConfigInfo.stream_sizes[i].height > max_dim.height)
1510 max_dim.height = mStreamConfigInfo.stream_sizes[i].height;
1511 }
1512
1513 clear_metadata_buffer(mParameters);
1514
1515 rc = ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_MAX_DIMENSION,
1516 max_dim);
1517 if (rc != NO_ERROR) {
1518 LOGE("Failed to update table for CAM_INTF_PARM_MAX_DIMENSION");
1519 return rc;
1520 }
1521
1522 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle, mParameters);
1523 if (rc != NO_ERROR) {
1524 LOGE("Failed to set CAM_INTF_PARM_MAX_DIMENSION");
1525 return rc;
1526 }
1527
1528 clear_metadata_buffer(mParameters);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001529 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -07001530
1531 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1532 mParameters);
1533 if (rc != NO_ERROR) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001534 LOGE("Failed to get CAM_INTF_PARM_SENSOR_MODE_INFO");
Thierry Strudel3d639192016-09-09 11:52:26 -07001535 return rc;
1536 }
1537
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001538 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_SENSOR_MODE_INFO, sensorModeInfo);
Chien-Yu Chenee335912017-02-09 17:53:20 -08001539 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1540 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1541 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1542 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1543 sensorModeInfo.num_raw_bits);
Thierry Strudel3d639192016-09-09 11:52:26 -07001544
1545 return rc;
1546}
1547
1548/*==============================================================================
Chien-Yu Chen605c3872017-06-14 11:09:23 -07001549 * FUNCTION : getCurrentSensorModeInfo
1550 *
1551 * DESCRIPTION: Get sensor mode information that is currently selected.
1552 *
1553 * PARAMETERS :
1554 * @sensorModeInfo : sensor mode information (output)
1555 *
1556 * RETURN : int32_t type of status
1557 * NO_ERROR -- success
1558 *              non-zero failure code
1559 *
1560 *==========================================================================*/
1561int32_t QCamera3HardwareInterface::getCurrentSensorModeInfo(cam_sensor_mode_info_t &sensorModeInfo)
1562{
1563 int32_t rc = NO_ERROR;
1564
1565 clear_metadata_buffer(mParameters);
1566 ADD_GET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO);
1567
1568 rc = mCameraHandle->ops->get_parms(mCameraHandle->camera_handle,
1569 mParameters);
1570 if (rc != NO_ERROR) {
1571        LOGE("Failed to get CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO");
1572 return rc;
1573 }
1574
1575 READ_PARAM_ENTRY(mParameters, CAM_INTF_PARM_CURRENT_SENSOR_MODE_INFO, sensorModeInfo);
1576 LOGH("%s: active array size %dx%d, pixel array size %dx%d, output pixel clock %u, "
1577 "raw bits: %d", __FUNCTION__, sensorModeInfo.active_array_size.width,
1578 sensorModeInfo.active_array_size.height, sensorModeInfo.pixel_array_size.width,
1579 sensorModeInfo.pixel_array_size.height, sensorModeInfo.op_pixel_clk,
1580 sensorModeInfo.num_raw_bits);
1581
1582 return rc;
1583}
1584
1585/*==============================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07001586 * FUNCTION : addToPPFeatureMask
1587 *
1588 * DESCRIPTION: add additional features to pp feature mask based on
1589 *              stream type and use case
1590 *
1591 * PARAMETERS :
1592 * @stream_format : stream type for feature mask
1593 * @stream_idx : stream idx within postprocess_mask list to change
1594 *
1595 * RETURN     : None
1596 *
1597 *==========================================================================*/
1598void QCamera3HardwareInterface::addToPPFeatureMask(int stream_format,
1599 uint32_t stream_idx)
1600{
1601 char feature_mask_value[PROPERTY_VALUE_MAX];
1602 cam_feature_mask_t feature_mask;
1603 int args_converted;
1604 int property_len;
1605
1606 /* Get feature mask from property */
Thierry Strudel269c81a2016-10-12 12:13:59 -07001607#ifdef _LE_CAMERA_
1608 char swtnr_feature_mask_value[PROPERTY_VALUE_MAX];
1609 snprintf(swtnr_feature_mask_value, PROPERTY_VALUE_MAX, "%lld", CAM_QTI_FEATURE_SW_TNR);
1610 property_len = property_get("persist.camera.hal3.feature",
1611 feature_mask_value, swtnr_feature_mask_value);
1612#else
Thierry Strudel3d639192016-09-09 11:52:26 -07001613 property_len = property_get("persist.camera.hal3.feature",
1614 feature_mask_value, "0");
Thierry Strudel269c81a2016-10-12 12:13:59 -07001615#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07001616 if ((property_len > 2) && (feature_mask_value[0] == '0') &&
1617 (feature_mask_value[1] == 'x')) {
1618 args_converted = sscanf(feature_mask_value, "0x%llx", &feature_mask);
1619 } else {
1620 args_converted = sscanf(feature_mask_value, "%lld", &feature_mask);
1621 }
1622 if (1 != args_converted) {
1623 feature_mask = 0;
1624 LOGE("Wrong feature mask %s", feature_mask_value);
1625 return;
1626 }
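    // The property accepts either hex or decimal, e.g. (hypothetical values)
    // setprop persist.camera.hal3.feature 0x2000 and
    // setprop persist.camera.hal3.feature 8192 parse to the same mask.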
1627
1628 switch (stream_format) {
1629 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: {
1630 /* Add LLVD to pp feature mask only if video hint is enabled */
1631 if ((m_bIsVideo) && (feature_mask & CAM_QTI_FEATURE_SW_TNR)) {
1632 mStreamConfigInfo.postprocess_mask[stream_idx]
1633 |= CAM_QTI_FEATURE_SW_TNR;
1634 LOGH("Added SW TNR to pp feature mask");
1635 } else if ((m_bIsVideo) && (feature_mask & CAM_QCOM_FEATURE_LLVD)) {
1636 mStreamConfigInfo.postprocess_mask[stream_idx]
1637 |= CAM_QCOM_FEATURE_LLVD;
1638 LOGH("Added LLVD SeeMore to pp feature mask");
1639 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001640 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1641 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
1642 mStreamConfigInfo.postprocess_mask[stream_idx] |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
1643 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08001644 if ((m_bIsVideo) && (gCamCapability[mCameraId]->qcom_supported_feature_mask &
1645 CAM_QTI_FEATURE_BINNING_CORRECTION)) {
1646 mStreamConfigInfo.postprocess_mask[stream_idx] |=
1647 CAM_QTI_FEATURE_BINNING_CORRECTION;
1648 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001649 break;
1650 }
1651 default:
1652 break;
1653 }
1654 LOGD("PP feature mask %llx",
1655 mStreamConfigInfo.postprocess_mask[stream_idx]);
1656}
1657
1658/*==============================================================================
1659 * FUNCTION : updateFpsInPreviewBuffer
1660 *
1661 * DESCRIPTION: update FPS information in preview buffer.
1662 *
1663 * PARAMETERS :
1664 * @metadata : pointer to metadata buffer
1665 * @frame_number: frame_number to look for in pending buffer list
1666 *
1667 * RETURN : None
1668 *
1669 *==========================================================================*/
1670void QCamera3HardwareInterface::updateFpsInPreviewBuffer(metadata_buffer_t *metadata,
1671 uint32_t frame_number)
1672{
1673 // Mark all pending buffers for this particular request
1674 // with corresponding framerate information
1675 for (List<PendingBuffersInRequest>::iterator req =
1676 mPendingBuffersMap.mPendingBuffersInRequest.begin();
1677 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
1678 for(List<PendingBufferInfo>::iterator j =
1679 req->mPendingBufferList.begin();
1680 j != req->mPendingBufferList.end(); j++) {
1681 QCamera3Channel *channel = (QCamera3Channel *)j->stream->priv;
1682 if ((req->frame_number == frame_number) &&
1683 (channel->getStreamTypeMask() &
1684 (1U << CAM_STREAM_TYPE_PREVIEW))) {
1685 IF_META_AVAILABLE(cam_fps_range_t, float_range,
1686 CAM_INTF_PARM_FPS_RANGE, metadata) {
1687 typeof (MetaData_t::refreshrate) cameraFps = float_range->max_fps;
1688 struct private_handle_t *priv_handle =
1689 (struct private_handle_t *)(*(j->buffer));
1690 setMetaData(priv_handle, UPDATE_REFRESH_RATE, &cameraFps);
1691 }
1692 }
1693 }
1694 }
1695}
1696
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001697/*==============================================================================
1698 * FUNCTION : updateTimeStampInPendingBuffers
1699 *
1700 * DESCRIPTION: update timestamp in display metadata for all pending buffers
1701 * of a frame number
1702 *
1703 * PARAMETERS :
1704 * @frame_number: frame_number. Timestamp will be set on pending buffers of this frame number
1705 * @timestamp : timestamp to be set
1706 *
1707 * RETURN : None
1708 *
1709 *==========================================================================*/
1710void QCamera3HardwareInterface::updateTimeStampInPendingBuffers(
1711 uint32_t frameNumber, nsecs_t timestamp)
1712{
1713 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
1714 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
Binhao Lin09245482017-08-31 18:25:29 -07001715 // WAR: save the av_timestamp to the next frame
1716 if(req->frame_number == frameNumber + 1) {
1717 req->av_timestamp = timestamp;
1718 }
1719
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001720 if (req->frame_number != frameNumber)
1721 continue;
1722
1723 for (auto k = req->mPendingBufferList.begin();
1724 k != req->mPendingBufferList.end(); k++ ) {
Binhao Lin09245482017-08-31 18:25:29 -07001725 // WAR: update timestamp when it's not VT usecase
1726 QCamera3Channel *channel = (QCamera3Channel *)k->stream->priv;
1727 if (!((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
1728 m_bAVTimerEnabled)) {
1729 struct private_handle_t *priv_handle =
1730 (struct private_handle_t *) (*(k->buffer));
1731 setMetaData(priv_handle, SET_VT_TIMESTAMP, &timestamp);
1732 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07001733 }
1734 }
1735 return;
1736}
1737
Thierry Strudel3d639192016-09-09 11:52:26 -07001738/*===========================================================================
1739 * FUNCTION : configureStreams
1740 *
1741 * DESCRIPTION: Reset HAL camera device processing pipeline and set up new input
1742 * and output streams.
1743 *
1744 * PARAMETERS :
1745 * @stream_list : streams to be configured
1746 *
1747 * RETURN :
1748 *
1749 *==========================================================================*/
1750int QCamera3HardwareInterface::configureStreams(
1751 camera3_stream_configuration_t *streamList)
1752{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001753 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS);
Thierry Strudel3d639192016-09-09 11:52:26 -07001754 int rc = 0;
1755
1756 // Acquire perfLock before configure streams
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001757 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001758 rc = configureStreamsPerfLocked(streamList);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08001759 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07001760
1761 return rc;
1762}
1763
1764/*===========================================================================
1765 * FUNCTION : configureStreamsPerfLocked
1766 *
1767 * DESCRIPTION: configureStreams while perfLock is held.
1768 *
1769 * PARAMETERS :
1770 * @stream_list : streams to be configured
1771 *
1772 * RETURN : int32_t type of status
1773 * NO_ERROR -- success
1774 *              non-zero failure code
1775 *==========================================================================*/
1776int QCamera3HardwareInterface::configureStreamsPerfLocked(
1777 camera3_stream_configuration_t *streamList)
1778{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08001779 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_CFG_STRMS_PERF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07001780 int rc = 0;
1781
1782 // Sanity check stream_list
1783 if (streamList == NULL) {
1784 LOGE("NULL stream configuration");
1785 return BAD_VALUE;
1786 }
1787 if (streamList->streams == NULL) {
1788 LOGE("NULL stream list");
1789 return BAD_VALUE;
1790 }
1791
1792 if (streamList->num_streams < 1) {
1793 LOGE("Bad number of streams requested: %d",
1794 streamList->num_streams);
1795 return BAD_VALUE;
1796 }
1797
1798 if (streamList->num_streams >= MAX_NUM_STREAMS) {
1799 LOGE("Maximum number of streams %d exceeded: %d",
1800 MAX_NUM_STREAMS, streamList->num_streams);
1801 return BAD_VALUE;
1802 }
1803
Jason Leec4cf5032017-05-24 18:31:41 -07001804 mOpMode = streamList->operation_mode;
1805 LOGD("mOpMode: %d", mOpMode);
1806
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001807 rc = validateUsageFlags(streamList);
1808 if (rc != NO_ERROR) {
1809 return rc;
1810 }
1811
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07001812    // Finish any pending HDR+ client opening and disable HDR+ mode if it's enabled.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001813 {
1814 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
1815 finishHdrPlusClientOpeningLocked(l);
1816 disableHdrPlusModeLocked();
1817 }
1818
Thierry Strudel3d639192016-09-09 11:52:26 -07001819    /* First invalidate all the streams in mStreamInfo;
1820 * if they appear again, they will be validated */
1821 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
1822 it != mStreamInfo.end(); it++) {
1823 QCamera3ProcessingChannel *channel = (QCamera3ProcessingChannel*)(*it)->stream->priv;
1824 if (channel) {
1825 channel->stop();
1826 }
1827 (*it)->status = INVALID;
1828 }
1829
1830 if (mRawDumpChannel) {
1831 mRawDumpChannel->stop();
1832 delete mRawDumpChannel;
1833 mRawDumpChannel = NULL;
1834 }
1835
Chien-Yu Chen8e599492016-11-01 13:37:46 -07001836 if (mHdrPlusRawSrcChannel) {
1837 mHdrPlusRawSrcChannel->stop();
1838 delete mHdrPlusRawSrcChannel;
1839 mHdrPlusRawSrcChannel = NULL;
1840 }
1841
Thierry Strudel3d639192016-09-09 11:52:26 -07001842 if (mSupportChannel)
1843 mSupportChannel->stop();
1844
1845 if (mAnalysisChannel) {
1846 mAnalysisChannel->stop();
1847 }
1848 if (mMetadataChannel) {
1849        /* If mStreamInfo is not empty, there is a metadata stream */
1850 mMetadataChannel->stop();
1851 }
1852 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07001853 stopChannelLocked(/*stop_immediately*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07001854 }
1855
1856 pthread_mutex_lock(&mMutex);
1857
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07001858 mPictureChannel = NULL;
1859
Thierry Strudel3d639192016-09-09 11:52:26 -07001860 // Check state
1861 switch (mState) {
1862 case INITIALIZED:
1863 case CONFIGURED:
1864 case STARTED:
1865 /* valid state */
1866 break;
1867 default:
1868 LOGE("Invalid state %d", mState);
1869 pthread_mutex_unlock(&mMutex);
1870 return -ENODEV;
1871 }
1872
1873 /* Check whether we have video stream */
1874 m_bIs4KVideo = false;
1875 m_bIsVideo = false;
1876 m_bEisSupportedSize = false;
1877 m_bTnrEnabled = false;
Mansoor Aftab93a66e52017-01-26 14:58:25 -08001878 m_bVideoHdrEnabled = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001879 bool isZsl = false;
Emilian Peev7650c122017-01-19 08:24:33 -08001880 bool depthPresent = false;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001881 bool isPreview = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07001882 uint32_t videoWidth = 0U;
1883 uint32_t videoHeight = 0U;
1884 size_t rawStreamCnt = 0;
1885 size_t stallStreamCnt = 0;
1886 size_t processedStreamCnt = 0;
1887 // Number of streams on ISP encoder path
1888 size_t numStreamsOnEncoder = 0;
1889 size_t numYuv888OnEncoder = 0;
1890 bool bYuv888OverrideJpeg = false;
1891 cam_dimension_t largeYuv888Size = {0, 0};
1892 cam_dimension_t maxViewfinderSize = {0, 0};
1893 bool bJpegExceeds4K = false;
1894 bool bJpegOnEncoder = false;
1895 bool bUseCommonFeatureMask = false;
1896 cam_feature_mask_t commonFeatureMask = 0;
1897 bool bSmallJpegSize = false;
1898 uint32_t width_ratio;
1899 uint32_t height_ratio;
1900 maxViewfinderSize = gCamCapability[mCameraId]->max_viewfinder_size;
1901 camera3_stream_t *inputStream = NULL;
1902 bool isJpeg = false;
1903 cam_dimension_t jpegSize = {0, 0};
Thierry Strudel9ec39c62016-12-28 11:30:05 -08001904 cam_dimension_t previewSize = {0, 0};
Emilian Peev0f3c3162017-03-15 12:57:46 +00001905 size_t pdStatCount = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07001906
1907 cam_padding_info_t padding_info = gCamCapability[mCameraId]->padding_info;
1908
1909 /*EIS configuration*/
Thierry Strudel3d639192016-09-09 11:52:26 -07001910 uint8_t eis_prop_set;
1911 uint32_t maxEisWidth = 0;
1912 uint32_t maxEisHeight = 0;
1913
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001914 // Initialize all instant AEC related variables
1915 mInstantAEC = false;
1916 mResetInstantAEC = false;
1917 mInstantAECSettledFrameNumber = 0;
1918 mAecSkipDisplayFrameBound = 0;
1919 mInstantAecFrameIdxCount = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08001920 mCurrFeatureState = 0;
1921 mStreamConfig = true;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07001922
Binhao Lin09245482017-08-31 18:25:29 -07001923 m_bAVTimerEnabled = false;
1924
Thierry Strudel3d639192016-09-09 11:52:26 -07001925 memset(&mInputStreamInfo, 0, sizeof(mInputStreamInfo));
1926
1927 size_t count = IS_TYPE_MAX;
1928 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
1929 for (size_t i = 0; i < count; i++) {
1930 if ((gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001931 (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
1932 m_bEisSupported = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07001933 break;
1934 }
1935 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001936
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001937 if (m_bEisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001938 maxEisWidth = MAX_EIS_WIDTH;
1939 maxEisHeight = MAX_EIS_HEIGHT;
1940 }
1941
1942 /* EIS setprop control */
1943 char eis_prop[PROPERTY_VALUE_MAX];
1944 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001945 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07001946 eis_prop_set = (uint8_t)atoi(eis_prop);
1947
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001948 m_bEisEnable = eis_prop_set && m_bEisSupported &&
Thierry Strudel3d639192016-09-09 11:52:26 -07001949 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE);
1950
Mansoor Aftabb9370df2017-03-15 17:09:34 -07001951 LOGD("m_bEisEnable: %d, eis_prop_set: %d, m_bEisSupported: %d",
1952 m_bEisEnable, eis_prop_set, m_bEisSupported);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07001953
Thierry Strudel3d639192016-09-09 11:52:26 -07001954 /* stream configurations */
1955 for (size_t i = 0; i < streamList->num_streams; i++) {
1956 camera3_stream_t *newStream = streamList->streams[i];
1957 LOGI("stream[%d] type = %d, format = %d, width = %d, "
1958 "height = %d, rotation = %d, usage = 0x%x",
1959 i, newStream->stream_type, newStream->format,
1960 newStream->width, newStream->height, newStream->rotation,
1961 newStream->usage);
1962 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
1963 newStream->stream_type == CAMERA3_STREAM_INPUT){
1964 isZsl = true;
1965 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08001966 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1967 IS_USAGE_PREVIEW(newStream->usage)) {
1968 isPreview = true;
1969 }
1970
Thierry Strudel3d639192016-09-09 11:52:26 -07001971 if (newStream->stream_type == CAMERA3_STREAM_INPUT){
1972 inputStream = newStream;
1973 }
1974
Emilian Peev7650c122017-01-19 08:24:33 -08001975 if ((newStream->format == HAL_PIXEL_FORMAT_BLOB) &&
1976 (newStream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07001977 isJpeg = true;
1978 jpegSize.width = newStream->width;
1979 jpegSize.height = newStream->height;
1980 if (newStream->width > VIDEO_4K_WIDTH ||
1981 newStream->height > VIDEO_4K_HEIGHT)
1982 bJpegExceeds4K = true;
1983 }
1984
1985 if ((HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED == newStream->format) &&
1986 (newStream->usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)) {
1987 m_bIsVideo = true;
Thierry Strudel2896d122017-02-23 19:18:03 -08001988 // In HAL3 we can have multiple different video streams.
1989 // The variables video width and height are used below as
1990 // dimensions of the biggest of them
1991 if (videoWidth < newStream->width ||
1992 videoHeight < newStream->height) {
1993 videoWidth = newStream->width;
1994 videoHeight = newStream->height;
1995 }
Thierry Strudel3d639192016-09-09 11:52:26 -07001996 if ((VIDEO_4K_WIDTH <= newStream->width) &&
1997 (VIDEO_4K_HEIGHT <= newStream->height)) {
1998 m_bIs4KVideo = true;
1999 }
2000 m_bEisSupportedSize = (newStream->width <= maxEisWidth) &&
2001 (newStream->height <= maxEisHeight);
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002002
Thierry Strudel3d639192016-09-09 11:52:26 -07002003 }
2004 if (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ||
2005 newStream->stream_type == CAMERA3_STREAM_OUTPUT) {
2006 switch (newStream->format) {
2007 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002008 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2009 depthPresent = true;
2010 break;
2011 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002012 stallStreamCnt++;
2013 if (isOnEncoder(maxViewfinderSize, newStream->width,
2014 newStream->height)) {
2015 numStreamsOnEncoder++;
2016 bJpegOnEncoder = true;
2017 }
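                // Flag JPEG sizes that would need more downscaling from the active
                // array than the ISP supports (max_downscale_factor); such small
                // JPEG streams take the PP superset feature mask path further below.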
2018 width_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.width,
2019 newStream->width);
2020 height_ratio = CEIL_DIVISION(gCamCapability[mCameraId]->active_array_size.height,
2021                        newStream->height);
2022 FATAL_IF(gCamCapability[mCameraId]->max_downscale_factor == 0,
2023 "FATAL: max_downscale_factor cannot be zero and so assert");
2024 if ( (width_ratio > gCamCapability[mCameraId]->max_downscale_factor) ||
2025 (height_ratio > gCamCapability[mCameraId]->max_downscale_factor)) {
2026 LOGH("Setting small jpeg size flag to true");
2027 bSmallJpegSize = true;
2028 }
2029 break;
2030 case HAL_PIXEL_FORMAT_RAW10:
2031 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2032 case HAL_PIXEL_FORMAT_RAW16:
2033 rawStreamCnt++;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002034 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2035 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2036 pdStatCount++;
2037 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002038 break;
2039 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2040 processedStreamCnt++;
2041 if (isOnEncoder(maxViewfinderSize, newStream->width,
2042 newStream->height)) {
2043 if (newStream->stream_type != CAMERA3_STREAM_BIDIRECTIONAL &&
2044 !IS_USAGE_ZSL(newStream->usage)) {
2045 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2046 }
2047 numStreamsOnEncoder++;
2048 }
2049 break;
2050 case HAL_PIXEL_FORMAT_YCbCr_420_888:
2051 processedStreamCnt++;
2052 if (isOnEncoder(maxViewfinderSize, newStream->width,
2053 newStream->height)) {
2054                // If the YUV888 size is not greater than 4K, set the feature mask
2055                // to SUPERSET so that it supports concurrent requests on
2056                // YUV and JPEG.
2057 if (newStream->width <= VIDEO_4K_WIDTH &&
2058 newStream->height <= VIDEO_4K_HEIGHT) {
2059 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2060 }
2061 numStreamsOnEncoder++;
2062 numYuv888OnEncoder++;
2063 largeYuv888Size.width = newStream->width;
2064 largeYuv888Size.height = newStream->height;
2065 }
2066 break;
2067 default:
2068 processedStreamCnt++;
2069 if (isOnEncoder(maxViewfinderSize, newStream->width,
2070 newStream->height)) {
2071 commonFeatureMask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2072 numStreamsOnEncoder++;
2073 }
2074 break;
2075 }
2076
2077 }
2078 }
2079
2080 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2081 gCamCapability[mCameraId]->position == CAM_POSITION_FRONT_AUX ||
2082 !m_bIsVideo) {
2083 m_bEisEnable = false;
2084 }
2085
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002086 if (validateUsageFlagsForEis(streamList) != NO_ERROR) {
2087 pthread_mutex_unlock(&mMutex);
2088 return -EINVAL;
2089 }
2090
Thierry Strudel54dc9782017-02-15 12:12:10 -08002091 uint8_t forceEnableTnr = 0;
2092 char tnr_prop[PROPERTY_VALUE_MAX];
2093 memset(tnr_prop, 0, sizeof(tnr_prop));
2094 property_get("debug.camera.tnr.forceenable", tnr_prop, "0");
2095 forceEnableTnr = (uint8_t)atoi(tnr_prop);
2096
Thierry Strudel3d639192016-09-09 11:52:26 -07002097 /* Logic to enable/disable TNR based on specific config size/etc.*/
2098 if ((m_bTnrPreview || m_bTnrVideo) && m_bIsVideo &&
Thierry Strudel3d639192016-09-09 11:52:26 -07002099 (mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE))
2100 m_bTnrEnabled = true;
Thierry Strudel54dc9782017-02-15 12:12:10 -08002101 else if (forceEnableTnr)
2102 m_bTnrEnabled = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002103
Mansoor Aftab93a66e52017-01-26 14:58:25 -08002104 char videoHdrProp[PROPERTY_VALUE_MAX];
2105 memset(videoHdrProp, 0, sizeof(videoHdrProp));
2106 property_get("persist.camera.hdr.video", videoHdrProp, "0");
2107 uint8_t hdr_mode_prop = (uint8_t)atoi(videoHdrProp);
2108
2109 if (hdr_mode_prop == 1 && m_bIsVideo &&
2110 mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2111 m_bVideoHdrEnabled = true;
2112 else
2113 m_bVideoHdrEnabled = false;
2114
2115
Thierry Strudel3d639192016-09-09 11:52:26 -07002116 /* Check if num_streams is sane */
2117 if (stallStreamCnt > MAX_STALLING_STREAMS ||
2118 rawStreamCnt > MAX_RAW_STREAMS ||
2119 processedStreamCnt > MAX_PROCESSED_STREAMS) {
2120        LOGE("Invalid stream config: stall: %d, raw: %d, processed: %d",
2121 stallStreamCnt, rawStreamCnt, processedStreamCnt);
2122 pthread_mutex_unlock(&mMutex);
2123 return -EINVAL;
2124 }
2125 /* Check whether we have zsl stream or 4k video case */
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002126 if (isZsl && m_bIs4KVideo) {
2127 LOGE("Currently invalid configuration ZSL & 4K Video!");
Thierry Strudel3d639192016-09-09 11:52:26 -07002128 pthread_mutex_unlock(&mMutex);
2129 return -EINVAL;
2130 }
2131 /* Check if stream sizes are sane */
2132 if (numStreamsOnEncoder > 2) {
2133 LOGE("Number of streams on ISP encoder path exceeds limits of 2");
2134 pthread_mutex_unlock(&mMutex);
2135 return -EINVAL;
2136 } else if (1 < numStreamsOnEncoder){
2137 bUseCommonFeatureMask = true;
2138 LOGH("Multiple streams above max viewfinder size, common mask needed");
2139 }
2140
2141 /* Check if BLOB size is greater than 4k in 4k recording case */
2142 if (m_bIs4KVideo && bJpegExceeds4K) {
2143 LOGE("HAL doesn't support Blob size greater than 4k in 4k recording");
2144 pthread_mutex_unlock(&mMutex);
2145 return -EINVAL;
2146 }
2147
Emilian Peev7650c122017-01-19 08:24:33 -08002148 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2149 depthPresent) {
2150 LOGE("HAL doesn't support depth streams in HFR mode!");
2151 pthread_mutex_unlock(&mMutex);
2152 return -EINVAL;
2153 }
2154
Thierry Strudel3d639192016-09-09 11:52:26 -07002155 // When JPEG and preview streams share VFE output, CPP will not apply CAC2
2156 // on JPEG stream. So disable such configurations to ensure CAC2 is applied.
2157 // Don't fail for reprocess configurations. Also don't fail if bJpegExceeds4K
2158 // is not true. Otherwise testMandatoryOutputCombinations will fail with following
2159 // configurations:
2160 // {[PRIV, PREVIEW] [PRIV, RECORD] [JPEG, RECORD]}
2161 // {[PRIV, PREVIEW] [YUV, RECORD] [JPEG, RECORD]}
2162 // (These two configurations will not have CAC2 enabled even in HQ modes.)
2163 if (!isZsl && bJpegOnEncoder && bJpegExceeds4K && bUseCommonFeatureMask) {
2164 ALOGE("%s: Blob size greater than 4k and multiple streams are on encoder output",
2165 __func__);
2166 pthread_mutex_unlock(&mMutex);
2167 return -EINVAL;
2168 }
2169
2170 // If jpeg stream is available, and a YUV 888 stream is on Encoder path, and
2171 // the YUV stream's size is greater or equal to the JPEG size, set common
2172 // postprocess mask to NONE, so that we can take advantage of postproc bypass.
2173 if (numYuv888OnEncoder && isOnEncoder(maxViewfinderSize,
2174 jpegSize.width, jpegSize.height) &&
2175 largeYuv888Size.width > jpegSize.width &&
2176 largeYuv888Size.height > jpegSize.height) {
2177 bYuv888OverrideJpeg = true;
2178 } else if (!isJpeg && numStreamsOnEncoder > 1) {
2179 commonFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2180 }
2181
2182 LOGH("max viewfinder width %d height %d isZsl %d bUseCommonFeature %x commonFeatureMask %llx",
2183 maxViewfinderSize.width, maxViewfinderSize.height, isZsl, bUseCommonFeatureMask,
2184 commonFeatureMask);
2185 LOGH("numStreamsOnEncoder %d, processedStreamCnt %d, stallcnt %d bSmallJpegSize %d",
2186 numStreamsOnEncoder, processedStreamCnt, stallStreamCnt, bSmallJpegSize);
2187
2188 rc = validateStreamDimensions(streamList);
2189 if (rc == NO_ERROR) {
2190 rc = validateStreamRotations(streamList);
2191 }
2192 if (rc != NO_ERROR) {
2193 LOGE("Invalid stream configuration requested!");
2194 pthread_mutex_unlock(&mMutex);
2195 return rc;
2196 }
2197
Emilian Peev0f3c3162017-03-15 12:57:46 +00002198 if (1 < pdStatCount) {
2199 LOGE("HAL doesn't support multiple PD streams");
2200 pthread_mutex_unlock(&mMutex);
2201 return -EINVAL;
2202 }
2203
2204 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
2205 (1 == pdStatCount)) {
2206 LOGE("HAL doesn't support PD streams in HFR mode!");
2207 pthread_mutex_unlock(&mMutex);
2208 return -EINVAL;
2209 }
2210
Thierry Strudel3d639192016-09-09 11:52:26 -07002211 camera3_stream_t *zslStream = NULL; //Only use this for size and not actual handle!
2212 for (size_t i = 0; i < streamList->num_streams; i++) {
2213 camera3_stream_t *newStream = streamList->streams[i];
2214 LOGH("newStream type = %d, stream format = %d "
2215 "stream size : %d x %d, stream rotation = %d",
2216 newStream->stream_type, newStream->format,
2217 newStream->width, newStream->height, newStream->rotation);
2218        //if the stream is already in mStreamInfo, validate it
2219 bool stream_exists = false;
2220 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2221 it != mStreamInfo.end(); it++) {
2222 if ((*it)->stream == newStream) {
2223 QCamera3ProcessingChannel *channel =
2224 (QCamera3ProcessingChannel*)(*it)->stream->priv;
2225 stream_exists = true;
2226 if (channel)
2227 delete channel;
2228 (*it)->status = VALID;
2229 (*it)->stream->priv = NULL;
2230 (*it)->channel = NULL;
2231 }
2232 }
2233 if (!stream_exists && newStream->stream_type != CAMERA3_STREAM_INPUT) {
2234 //new stream
2235 stream_info_t* stream_info;
2236 stream_info = (stream_info_t* )malloc(sizeof(stream_info_t));
2237 if (!stream_info) {
2238 LOGE("Could not allocate stream info");
2239 rc = -ENOMEM;
2240 pthread_mutex_unlock(&mMutex);
2241 return rc;
2242 }
2243 stream_info->stream = newStream;
2244 stream_info->status = VALID;
2245 stream_info->channel = NULL;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07002246 stream_info->id = i;
Thierry Strudel3d639192016-09-09 11:52:26 -07002247 mStreamInfo.push_back(stream_info);
2248 }
2249 /* Covers Opaque ZSL and API1 F/W ZSL */
2250 if (IS_USAGE_ZSL(newStream->usage)
2251 || newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL ) {
2252 if (zslStream != NULL) {
2253 LOGE("Multiple input/reprocess streams requested!");
2254 pthread_mutex_unlock(&mMutex);
2255 return BAD_VALUE;
2256 }
2257 zslStream = newStream;
2258 }
2259 /* Covers YUV reprocess */
2260 if (inputStream != NULL) {
2261 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT
2262 && newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2263 && inputStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888
2264 && inputStream->width == newStream->width
2265 && inputStream->height == newStream->height) {
2266 if (zslStream != NULL) {
2267                    /* This scenario indicates that multiple YUV streams with the same size
2268                     * as the input stream have been requested. Since the zsl stream handle
2269                     * is solely used to override the size of streams which share h/w
2270                     * streams, we will just make a guess here as to which of the streams
2271                     * is the ZSL stream. This will be refactored once we have generic
2272                     * logic for streams sharing encoder output.
2273 */
2274 LOGH("Warning, Multiple ip/reprocess streams requested!");
2275 }
2276 zslStream = newStream;
2277 }
2278 }
2279 }
2280
2281 /* If a zsl stream is set, we know that we have configured at least one input or
2282 bidirectional stream */
2283 if (NULL != zslStream) {
2284 mInputStreamInfo.dim.width = (int32_t)zslStream->width;
2285 mInputStreamInfo.dim.height = (int32_t)zslStream->height;
2286 mInputStreamInfo.format = zslStream->format;
2287 mInputStreamInfo.usage = zslStream->usage;
2288 LOGD("Input stream configured! %d x %d, format %d, usage %d",
2289 mInputStreamInfo.dim.width,
2290 mInputStreamInfo.dim.height,
2291 mInputStreamInfo.format, mInputStreamInfo.usage);
2292 }
2293
2294 cleanAndSortStreamInfo();
2295 if (mMetadataChannel) {
2296 delete mMetadataChannel;
2297 mMetadataChannel = NULL;
2298 }
2299 if (mSupportChannel) {
2300 delete mSupportChannel;
2301 mSupportChannel = NULL;
2302 }
2303
2304 if (mAnalysisChannel) {
2305 delete mAnalysisChannel;
2306 mAnalysisChannel = NULL;
2307 }
2308
2309 if (mDummyBatchChannel) {
2310 delete mDummyBatchChannel;
2311 mDummyBatchChannel = NULL;
2312 }
2313
Emilian Peev7650c122017-01-19 08:24:33 -08002314 if (mDepthChannel) {
2315 mDepthChannel = NULL;
2316 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01002317 mDepthCloudMode = CAM_PD_DATA_SKIP;
Emilian Peev7650c122017-01-19 08:24:33 -08002318
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002319 mShutterDispatcher.clear();
2320 mOutputBufferDispatcher.clear();
2321
Thierry Strudel2896d122017-02-23 19:18:03 -08002322 char is_type_value[PROPERTY_VALUE_MAX];
2323 property_get("persist.camera.is_type", is_type_value, "4");
2324 m_bEis3PropertyEnabled = (atoi(is_type_value) == IS_TYPE_EIS_3_0);
2325
Binhao Line406f062017-05-03 14:39:44 -07002326 char property_value[PROPERTY_VALUE_MAX];
2327 property_get("persist.camera.gzoom.at", property_value, "0");
2328 int goog_zoom_at = atoi(property_value);
Jason Leec4cf5032017-05-24 18:31:41 -07002329 bool is_goog_zoom_video_enabled = ((goog_zoom_at & 1) > 0) &&
2330 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
2331 bool is_goog_zoom_preview_enabled = ((goog_zoom_at & 2) > 0) &&
2332 gCamCapability[mCameraId]->position == CAM_POSITION_BACK;
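    // persist.camera.gzoom.at is a bitmask (back camera only): bit 0 enables
    // Google zoom on video streams, bit 1 on preview streams; e.g. a value of 3
    // enables it on both.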
Binhao Line406f062017-05-03 14:39:44 -07002333
2334 property_get("persist.camera.gzoom.4k", property_value, "0");
2335 bool is_goog_zoom_4k_enabled = (atoi(property_value) > 0);
2336
Thierry Strudel3d639192016-09-09 11:52:26 -07002337 //Create metadata channel and initialize it
2338 cam_feature_mask_t metadataFeatureMask = CAM_QCOM_FEATURE_NONE;
2339 setPAAFSupport(metadataFeatureMask, CAM_STREAM_TYPE_METADATA,
2340 gCamCapability[mCameraId]->color_arrangement);
2341 mMetadataChannel = new QCamera3MetadataChannel(mCameraHandle->camera_handle,
2342 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002343 setBufferErrorStatus, &padding_info, metadataFeatureMask, this);
Thierry Strudel3d639192016-09-09 11:52:26 -07002344 if (mMetadataChannel == NULL) {
2345 LOGE("failed to allocate metadata channel");
2346 rc = -ENOMEM;
2347 pthread_mutex_unlock(&mMutex);
2348 return rc;
2349 }
Emilian Peev662c05e2017-05-16 10:00:04 +01002350 mMetadataChannel->enableDepthData(depthPresent);
Thierry Strudel3d639192016-09-09 11:52:26 -07002351 rc = mMetadataChannel->initialize(IS_TYPE_NONE);
2352 if (rc < 0) {
2353 LOGE("metadata channel initialization failed");
2354 delete mMetadataChannel;
2355 mMetadataChannel = NULL;
2356 pthread_mutex_unlock(&mMutex);
2357 return rc;
2358 }
2359
Thierry Strudel2896d122017-02-23 19:18:03 -08002360 cam_feature_mask_t zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002361 bool isRawStreamRequested = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002362 bool onlyRaw = true;
Binhao Lincdb362a2017-04-20 13:31:54 -07002363 // Keep track of preview/video streams indices.
2364    // There could be more than one preview stream, but only one video stream.
2365 int32_t video_stream_idx = -1;
2366 int32_t preview_stream_idx[streamList->num_streams];
2367 size_t preview_stream_cnt = 0;
Jason Leea52b77e2017-06-27 16:16:17 -07002368 bool previewTnr[streamList->num_streams];
2369 memset(previewTnr, 0, sizeof(bool) * streamList->num_streams);
2370 bool isFront = gCamCapability[mCameraId]->position == CAM_POSITION_FRONT;
2371 // Loop through once to determine preview TNR conditions before creating channels.
2372 for (size_t i = 0; i < streamList->num_streams; i++) {
2373 camera3_stream_t *newStream = streamList->streams[i];
2374 uint32_t stream_usage = newStream->usage;
2375 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT &&
2376 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED) {
2377 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER)
2378 video_stream_idx = (int32_t)i;
2379 else
2380 preview_stream_idx[preview_stream_cnt++] = (int32_t)i;
2381 }
2382 }
2383 // By default, preview stream TNR is disabled.
2384 // Enable TNR to the preview stream if all conditions below are satisfied:
2385    // Enable TNR for the preview stream if all conditions below are satisfied:
2386 // 2. video stream TNR is enabled.
2387 // 3. EIS2.0 OR is front camera (which wouldn't use EIS3 even if it's set)
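    // For example, a 1920x1080 preview shared with a 1920x1080 video stream,
    // with video TNR enabled and EIS 2.0 (or the front camera) in use, gets
    // previewTnr set to true for that preview stream.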
2388 for (size_t i = 0; i < preview_stream_cnt && video_stream_idx != -1; i++) {
2389 camera3_stream_t *video_stream = streamList->streams[video_stream_idx];
2390 camera3_stream_t *preview_stream = streamList->streams[preview_stream_idx[i]];
2391 if (m_bTnrEnabled && m_bTnrVideo &&
2392 (isFront || (atoi(is_type_value) == IS_TYPE_EIS_2_0)) &&
2393 video_stream->width == preview_stream->width &&
2394 video_stream->height == preview_stream->height) {
2395 previewTnr[preview_stream_idx[i]] = true;
2396 }
2397 }
2398
Thierry Strudel3d639192016-09-09 11:52:26 -07002399 memset(&mStreamConfigInfo, 0, sizeof(cam_stream_size_info_t));
2400 /* Allocate channel objects for the requested streams */
2401 for (size_t i = 0; i < streamList->num_streams; i++) {
Binhao Line406f062017-05-03 14:39:44 -07002402
Thierry Strudel3d639192016-09-09 11:52:26 -07002403 camera3_stream_t *newStream = streamList->streams[i];
2404 uint32_t stream_usage = newStream->usage;
2405 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)newStream->width;
2406 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)newStream->height;
2407 struct camera_info *p_info = NULL;
2408 pthread_mutex_lock(&gCamLock);
2409 p_info = get_cam_info(mCameraId, &mStreamConfigInfo.sync_type);
2410 pthread_mutex_unlock(&gCamLock);
2411 if ((newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL
2412 || IS_USAGE_ZSL(newStream->usage)) &&
2413 newStream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED){
Thierry Strudel2896d122017-02-23 19:18:03 -08002414 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002415 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
Thierry Strudel2896d122017-02-23 19:18:03 -08002416 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2417 if (bUseCommonFeatureMask)
2418 zsl_ppmask = commonFeatureMask;
2419 else
2420 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002421 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08002422 if (numStreamsOnEncoder > 0)
2423 zsl_ppmask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2424 else
2425 zsl_ppmask = CAM_QCOM_FEATURE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07002426 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002427 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002428 } else if(newStream->stream_type == CAMERA3_STREAM_INPUT) {
Thierry Strudel2896d122017-02-23 19:18:03 -08002429 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002430 LOGH("Input stream configured, reprocess config");
2431 } else {
2432 //for non zsl streams find out the format
2433 switch (newStream->format) {
2434 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED :
2435 {
Thierry Strudel2896d122017-02-23 19:18:03 -08002436 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002437 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2438 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2439 /* add additional features to pp feature mask */
2440 addToPPFeatureMask(HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
2441 mStreamConfigInfo.num_streams);
2442
2443 if (stream_usage & private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) {
2444 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2445 CAM_STREAM_TYPE_VIDEO;
2446 if (m_bTnrEnabled && m_bTnrVideo) {
2447 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2448 CAM_QCOM_FEATURE_CPP_TNR;
2449 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2450 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2451 ~CAM_QCOM_FEATURE_CDS;
2452 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002453 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2454 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2455 CAM_QTI_FEATURE_PPEISCORE;
2456 }
Binhao Line406f062017-05-03 14:39:44 -07002457 if (is_goog_zoom_video_enabled && (is_goog_zoom_4k_enabled || !m_bIs4KVideo)) {
2458 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2459 CAM_QCOM_FEATURE_GOOG_ZOOM;
2460 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002461 } else {
2462 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2463 CAM_STREAM_TYPE_PREVIEW;
Jason Leea52b77e2017-06-27 16:16:17 -07002464 if (m_bTnrEnabled && (previewTnr[i] || m_bTnrPreview)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002465 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2466 CAM_QCOM_FEATURE_CPP_TNR;
2467 //TNR and CDS are mutually exclusive. So reset CDS from feature mask
2468 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2469 ~CAM_QCOM_FEATURE_CDS;
2470 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08002471 if(!m_bSwTnrPreview) {
2472 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &=
2473 ~CAM_QTI_FEATURE_SW_TNR;
2474 }
Binhao Line406f062017-05-03 14:39:44 -07002475 if (is_goog_zoom_preview_enabled) {
2476 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] |=
2477 CAM_QCOM_FEATURE_GOOG_ZOOM;
2478 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002479 padding_info.width_padding = mSurfaceStridePadding;
2480 padding_info.height_padding = CAM_PAD_TO_2;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002481 previewSize.width = (int32_t)newStream->width;
2482 previewSize.height = (int32_t)newStream->height;
Thierry Strudel3d639192016-09-09 11:52:26 -07002483 }
2484 if ((newStream->rotation == CAMERA3_STREAM_ROTATION_90) ||
2485 (newStream->rotation == CAMERA3_STREAM_ROTATION_270)) {
2486 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2487 newStream->height;
2488 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2489 newStream->width;
2490 }
2491 }
2492 break;
2493 case HAL_PIXEL_FORMAT_YCbCr_420_888:
Thierry Strudel2896d122017-02-23 19:18:03 -08002494 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002495 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_CALLBACK;
2496 if (isOnEncoder(maxViewfinderSize, newStream->width, newStream->height)) {
2497 if (bUseCommonFeatureMask)
2498 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2499 commonFeatureMask;
2500 else
2501 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2502 CAM_QCOM_FEATURE_NONE;
2503 } else {
2504 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2505 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2506 }
2507 break;
2508 case HAL_PIXEL_FORMAT_BLOB:
Thierry Strudel2896d122017-02-23 19:18:03 -08002509 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002510 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_SNAPSHOT;
2511 // No need to check bSmallJpegSize if ZSL is present since JPEG uses ZSL stream
2512 if ((m_bIs4KVideo && !isZsl) || (bSmallJpegSize && !isZsl)) {
2513 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2514 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel2896d122017-02-23 19:18:03 -08002515 /* Remove rotation if it is not supported
2516 for 4K LiveVideo snapshot case (online processing) */
2517 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask &
2518 CAM_QCOM_FEATURE_ROTATION)) {
2519 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams]
2520 &= ~CAM_QCOM_FEATURE_ROTATION;
2521 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002522 } else {
2523 if (bUseCommonFeatureMask &&
2524 isOnEncoder(maxViewfinderSize, newStream->width,
2525 newStream->height)) {
2526 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = commonFeatureMask;
2527 } else {
2528 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2529 }
2530 }
2531 if (isZsl) {
2532 if (zslStream) {
2533 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2534 (int32_t)zslStream->width;
2535 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2536 (int32_t)zslStream->height;
Thierry Strudel2896d122017-02-23 19:18:03 -08002537 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2538 zsl_ppmask;
Thierry Strudel3d639192016-09-09 11:52:26 -07002539 } else {
2540 LOGE("Error, No ZSL stream identified");
2541 pthread_mutex_unlock(&mMutex);
2542 return -EINVAL;
2543 }
2544 } else if (m_bIs4KVideo) {
2545 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width = (int32_t)videoWidth;
2546 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height = (int32_t)videoHeight;
2547 } else if (bYuv888OverrideJpeg) {
2548 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
2549 (int32_t)largeYuv888Size.width;
2550 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
2551 (int32_t)largeYuv888Size.height;
2552 }
2553 break;
2554 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2555 case HAL_PIXEL_FORMAT_RAW16:
2556 case HAL_PIXEL_FORMAT_RAW10:
2557 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
2558 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2559 isRawStreamRequested = true;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002560 if ((HAL_DATASPACE_DEPTH == newStream->data_space) &&
2561 (HAL_PIXEL_FORMAT_RAW16 == newStream->format)) {
2562 mStreamConfigInfo.sub_format_type[mStreamConfigInfo.num_streams] =
2563 gCamCapability[mCameraId]->sub_fmt[mPDIndex];
2564 mStreamConfigInfo.format[mStreamConfigInfo.num_streams] =
2565 gCamCapability[mCameraId]->supported_meta_raw_fmts[mPDIndex];
2566 mStreamConfigInfo.dt[mStreamConfigInfo.num_streams] =
2567 gCamCapability[mCameraId]->dt[mPDIndex];
2568 mStreamConfigInfo.vc[mStreamConfigInfo.num_streams] =
2569 gCamCapability[mCameraId]->vc[mPDIndex];
2570 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002571 break;
2572 default:
Thierry Strudel2896d122017-02-23 19:18:03 -08002573 onlyRaw = false; // There is non-raw stream - bypass flag if set
Thierry Strudel3d639192016-09-09 11:52:26 -07002574 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_DEFAULT;
2575 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
2576 break;
2577 }
2578 }
2579
2580 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2581 (cam_stream_type_t) mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2582 gCamCapability[mCameraId]->color_arrangement);
2583
2584 if (newStream->priv == NULL) {
2585 //New stream, construct channel
2586 switch (newStream->stream_type) {
2587 case CAMERA3_STREAM_INPUT:
2588 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ;
2589                newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE; // write access for in-place algorithms
2590 break;
2591 case CAMERA3_STREAM_BIDIRECTIONAL:
2592 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_READ |
2593 GRALLOC_USAGE_HW_CAMERA_WRITE;
2594 break;
2595 case CAMERA3_STREAM_OUTPUT:
2596                /* For video encoding streams, set the read/write-rarely
2597                 * flags so that the buffers may be allocated as uncached */
2598 if (newStream->usage & GRALLOC_USAGE_HW_VIDEO_ENCODER)
2599 newStream->usage |=
2600 (GRALLOC_USAGE_SW_READ_RARELY |
2601 GRALLOC_USAGE_SW_WRITE_RARELY |
2602 GRALLOC_USAGE_HW_CAMERA_WRITE);
2603 else if (IS_USAGE_ZSL(newStream->usage))
2604 {
2605                    LOGD("ZSL usage flag set, skipping usage override");
2606 }
2607 else if (newStream == zslStream
2608 || newStream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
2609 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_ZSL;
2610 } else
2611 newStream->usage |= GRALLOC_USAGE_HW_CAMERA_WRITE;
2612 break;
2613 default:
2614 LOGE("Invalid stream_type %d", newStream->stream_type);
2615 break;
2616 }
2617
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002618 bool forcePreviewUBWC = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07002619 if (newStream->stream_type == CAMERA3_STREAM_OUTPUT ||
2620 newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL) {
2621 QCamera3ProcessingChannel *channel = NULL;
2622 switch (newStream->format) {
2623 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
2624 if ((newStream->usage &
2625 private_handle_t::PRIV_FLAGS_VIDEO_ENCODER) &&
2626 (streamList->operation_mode ==
2627 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2628 ) {
2629 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2630 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002631 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002632 this,
2633 newStream,
2634 (cam_stream_type_t)
2635 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2636 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2637 mMetadataChannel,
2638 0); //heap buffers are not required for HFR video channel
2639 if (channel == NULL) {
2640 LOGE("allocation of channel failed");
2641 pthread_mutex_unlock(&mMutex);
2642 return -ENOMEM;
2643 }
2644 //channel->getNumBuffers() will return 0 here so use
2645                        //MAX_INFLIGHT_HFR_REQUESTS
2646 newStream->max_buffers = MAX_INFLIGHT_HFR_REQUESTS;
2647 newStream->priv = channel;
2648 LOGI("num video buffers in HFR mode: %d",
2649 MAX_INFLIGHT_HFR_REQUESTS);
2650 } else {
2651                        /* Copy stream contents in the HFR preview-only case to create
2652                         * a dummy batch channel so that sensor streaming is in
2653 * HFR mode */
2654 if (!m_bIsVideo && (streamList->operation_mode ==
2655 CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)) {
2656 mDummyBatchStream = *newStream;
2657 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002658 int bufferCount = MAX_INFLIGHT_REQUESTS;
2659 if (mStreamConfigInfo.type[mStreamConfigInfo.num_streams] ==
2660 CAM_STREAM_TYPE_VIDEO) {
Zhijun He6cdf6372017-07-15 14:59:58 -07002661 if (m_bEis3PropertyEnabled /* hint for EIS 3 needed here */) {
2662 // WAR: 4K video can only run <=30fps, reduce the buffer count.
2663 bufferCount = m_bIs4KVideo ?
2664 MAX_30FPS_VIDEO_BUFFERS : MAX_VIDEO_BUFFERS;
2665 }
2666
Thierry Strudel2896d122017-02-23 19:18:03 -08002667 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002668 channel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
2669 mChannelHandle, mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002670 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002671 this,
2672 newStream,
2673 (cam_stream_type_t)
2674 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2675 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2676 mMetadataChannel,
Thierry Strudel2896d122017-02-23 19:18:03 -08002677 bufferCount);
Thierry Strudel3d639192016-09-09 11:52:26 -07002678 if (channel == NULL) {
2679 LOGE("allocation of channel failed");
2680 pthread_mutex_unlock(&mMutex);
2681 return -ENOMEM;
2682 }
Thierry Strudel2896d122017-02-23 19:18:03 -08002683 /* disable UBWC for preview, though supported,
2684 * to take advantage of CPP duplication */
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002685 if (m_bIsVideo && (!QCameraCommon::isVideoUBWCEnabled()) &&
Thierry Strudel2896d122017-02-23 19:18:03 -08002686 (previewSize.width == (int32_t)videoWidth)&&
2687 (previewSize.height == (int32_t)videoHeight)){
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002688 forcePreviewUBWC = false;
Thierry Strudel2896d122017-02-23 19:18:03 -08002689 }
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002690 channel->setUBWCEnabled(forcePreviewUBWC);
Binhao Line406f062017-05-03 14:39:44 -07002691 /* When goog_zoom is linked to the preview or video stream,
2692                     * disable UBWC for the linked stream */
2693 if ((mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] &
2694 CAM_QCOM_FEATURE_GOOG_ZOOM) != 0) {
2695 channel->setUBWCEnabled(false);
2696 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002697 newStream->max_buffers = channel->getNumBuffers();
2698 newStream->priv = channel;
2699 }
2700 break;
2701 case HAL_PIXEL_FORMAT_YCbCr_420_888: {
2702 channel = new QCamera3YUVChannel(mCameraHandle->camera_handle,
2703 mChannelHandle,
2704 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002705 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002706 this,
2707 newStream,
2708 (cam_stream_type_t)
2709 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2710 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2711 mMetadataChannel);
2712 if (channel == NULL) {
2713 LOGE("allocation of YUV channel failed");
2714 pthread_mutex_unlock(&mMutex);
2715 return -ENOMEM;
2716 }
2717 newStream->max_buffers = channel->getNumBuffers();
2718 newStream->priv = channel;
2719 break;
2720 }
2721 case HAL_PIXEL_FORMAT_RAW_OPAQUE:
2722 case HAL_PIXEL_FORMAT_RAW16:
Emilian Peev0f3c3162017-03-15 12:57:46 +00002723 case HAL_PIXEL_FORMAT_RAW10: {
2724 bool isRAW16 = ((newStream->format == HAL_PIXEL_FORMAT_RAW16) &&
2725 (HAL_DATASPACE_DEPTH != newStream->data_space))
2726 ? true : false;
Thierry Strudel3d639192016-09-09 11:52:26 -07002727 mRawChannel = new QCamera3RawChannel(
2728 mCameraHandle->camera_handle, mChannelHandle,
2729 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002730 setBufferErrorStatus, &padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07002731 this, newStream,
2732 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
Emilian Peev0f3c3162017-03-15 12:57:46 +00002733 mMetadataChannel, isRAW16);
Thierry Strudel3d639192016-09-09 11:52:26 -07002734 if (mRawChannel == NULL) {
2735 LOGE("allocation of raw channel failed");
2736 pthread_mutex_unlock(&mMutex);
2737 return -ENOMEM;
2738 }
2739 newStream->max_buffers = mRawChannel->getNumBuffers();
2740 newStream->priv = (QCamera3ProcessingChannel*)mRawChannel;
2741 break;
Emilian Peev0f3c3162017-03-15 12:57:46 +00002742 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002743 case HAL_PIXEL_FORMAT_BLOB:
Emilian Peev7650c122017-01-19 08:24:33 -08002744 if (newStream->data_space == HAL_DATASPACE_DEPTH) {
2745 mDepthChannel = new QCamera3DepthChannel(
2746 mCameraHandle->camera_handle, mChannelHandle,
2747 mCameraHandle->ops, NULL, NULL, &padding_info,
2748 0, this, MAX_INFLIGHT_REQUESTS, newStream,
2749 mMetadataChannel);
2750 if (NULL == mDepthChannel) {
2751 LOGE("Allocation of depth channel failed");
2752 pthread_mutex_unlock(&mMutex);
2753 return NO_MEMORY;
2754 }
2755 newStream->priv = mDepthChannel;
2756 newStream->max_buffers = MAX_INFLIGHT_REQUESTS;
2757 } else {
2758 // Max live snapshot inflight buffer is 1. This is to mitigate
2759 // frame drop issues for video snapshot. The more buffers being
2760 // allocated, the more frame drops there are.
2761 mPictureChannel = new QCamera3PicChannel(
2762 mCameraHandle->camera_handle, mChannelHandle,
2763 mCameraHandle->ops, captureResultCb,
2764 setBufferErrorStatus, &padding_info, this, newStream,
2765 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2766 m_bIs4KVideo, isZsl, mMetadataChannel,
2767 (m_bIsVideo ? 1 : MAX_INFLIGHT_BLOB));
2768 if (mPictureChannel == NULL) {
2769 LOGE("allocation of channel failed");
2770 pthread_mutex_unlock(&mMutex);
2771 return -ENOMEM;
2772 }
2773 newStream->priv = (QCamera3ProcessingChannel*)mPictureChannel;
2774 newStream->max_buffers = mPictureChannel->getNumBuffers();
2775 mPictureChannel->overrideYuvSize(
2776 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width,
2777 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height);
Thierry Strudel3d639192016-09-09 11:52:26 -07002778 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002779 break;
2780
2781 default:
2782 LOGE("not a supported format 0x%x", newStream->format);
Thierry Strudel73e91562017-05-15 09:16:18 -07002783 pthread_mutex_unlock(&mMutex);
2784 return -EINVAL;
Thierry Strudel3d639192016-09-09 11:52:26 -07002785 }
2786 } else if (newStream->stream_type == CAMERA3_STREAM_INPUT) {
2787 newStream->max_buffers = MAX_INFLIGHT_REPROCESS_REQUESTS;
2788 } else {
2789 LOGE("Error, Unknown stream type");
2790 pthread_mutex_unlock(&mMutex);
2791 return -EINVAL;
2792 }
2793
2794 QCamera3Channel *channel = (QCamera3Channel*) newStream->priv;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002795 if (channel != NULL && QCamera3Channel::isUBWCEnabled()) {
Jason Leec4cf5032017-05-24 18:31:41 -07002796 // Here we only care whether it's EIS3 or not
2797 cam_is_type_t isType = m_bEis3PropertyEnabled ? IS_TYPE_EIS_3_0 : IS_TYPE_NONE;
2798 if (gCamCapability[mCameraId]->position == CAM_POSITION_FRONT ||
2799 mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
2800 isType = IS_TYPE_NONE;
Shuzhen Wangbb03f5c2017-02-17 15:38:24 -08002801 cam_format_t fmt = QCamera3Channel::getStreamDefaultFormat(
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07002802 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
Jason Leec4cf5032017-05-24 18:31:41 -07002803 newStream->width, newStream->height, forcePreviewUBWC, isType);
Thierry Strudel3d639192016-09-09 11:52:26 -07002804 if(fmt == CAM_FORMAT_YUV_420_NV12_UBWC) {
2805 newStream->usage |= GRALLOC_USAGE_PRIVATE_ALLOC_UBWC;
2806 }
2807 }
2808
2809 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
2810 it != mStreamInfo.end(); it++) {
2811 if ((*it)->stream == newStream) {
2812 (*it)->channel = (QCamera3ProcessingChannel*) newStream->priv;
2813 break;
2814 }
2815 }
2816 } else {
2817 // Channel already exists for this stream
2818 // Do nothing for now
2819 }
2820 padding_info = gCamCapability[mCameraId]->padding_info;
2821
Emilian Peev7650c122017-01-19 08:24:33 -08002822        /* Do not add entries for input & depth streams in metastream info
Thierry Strudel3d639192016-09-09 11:52:26 -07002823         * since there is no real stream associated with them
2824 */
Emilian Peev7650c122017-01-19 08:24:33 -08002825 if ((newStream->stream_type != CAMERA3_STREAM_INPUT) &&
Emilian Peev0f3c3162017-03-15 12:57:46 +00002826 !((newStream->data_space == HAL_DATASPACE_DEPTH) &&
2827 (newStream->format == HAL_PIXEL_FORMAT_BLOB))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002828 mStreamConfigInfo.num_streams++;
Emilian Peev7650c122017-01-19 08:24:33 -08002829 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002830 }
2831
Chien-Yu Chen3f303522017-05-19 15:21:45 -07002832 // Let buffer dispatcher know the configured streams.
2833 mOutputBufferDispatcher.configureStreams(streamList);
2834
Thierry Strudel2896d122017-02-23 19:18:03 -08002835 if (mOpMode != QCAMERA3_VENDOR_STREAM_CONFIGURATION_RAW_ONLY_MODE) {
2836 onlyRaw = false;
2837 }
2838
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002839 // Create analysis stream all the time, even when h/w support is not available
Thierry Strudel2896d122017-02-23 19:18:03 -08002840 if (!onlyRaw) {
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002841 cam_feature_mask_t analysisFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002842 cam_analysis_info_t analysisInfo;
2843 int32_t ret = NO_ERROR;
2844 ret = mCommon.getAnalysisInfo(
2845 FALSE,
2846 analysisFeatureMask,
2847 &analysisInfo);
2848 if (ret == NO_ERROR) {
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002849 cam_color_filter_arrangement_t analysis_color_arrangement =
2850 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2851 CAM_FILTER_ARRANGEMENT_Y :
2852 gCamCapability[mCameraId]->color_arrangement);
2853 setPAAFSupport(analysisFeatureMask, CAM_STREAM_TYPE_ANALYSIS,
2854 analysis_color_arrangement);
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002855 cam_dimension_t analysisDim;
2856 analysisDim = mCommon.getMatchingDimension(previewSize,
2857 analysisInfo.analysis_recommended_res);
2858
2859 mAnalysisChannel = new QCamera3SupportChannel(
2860 mCameraHandle->camera_handle,
2861 mChannelHandle,
2862 mCameraHandle->ops,
2863 &analysisInfo.analysis_padding_info,
2864 analysisFeatureMask,
2865 CAM_STREAM_TYPE_ANALYSIS,
2866 &analysisDim,
2867 (analysisInfo.analysis_format
2868 == CAM_FORMAT_Y_ONLY ? CAM_FORMAT_Y_ONLY
2869 : CAM_FORMAT_YUV_420_NV21),
2870 analysisInfo.hw_analysis_supported,
2871 gCamCapability[mCameraId]->color_arrangement,
2872 this,
2873 0); // force buffer count to 0
2874 } else {
2875 LOGW("getAnalysisInfo failed, ret = %d", ret);
2876 }
2877 if (!mAnalysisChannel) {
2878 LOGW("Analysis channel cannot be created");
2879 }
2880 }
2881
Thierry Strudel3d639192016-09-09 11:52:26 -07002882 //RAW DUMP channel
2883 if (mEnableRawDump && isRawStreamRequested == false){
2884 cam_dimension_t rawDumpSize;
2885 rawDumpSize = getMaxRawSize(mCameraId);
2886 cam_feature_mask_t rawDumpFeatureMask = CAM_QCOM_FEATURE_NONE;
2887 setPAAFSupport(rawDumpFeatureMask,
2888 CAM_STREAM_TYPE_RAW,
2889 gCamCapability[mCameraId]->color_arrangement);
2890 mRawDumpChannel = new QCamera3RawDumpChannel(mCameraHandle->camera_handle,
2891 mChannelHandle,
2892 mCameraHandle->ops,
2893 rawDumpSize,
2894 &padding_info,
2895 this, rawDumpFeatureMask);
2896 if (!mRawDumpChannel) {
2897 LOGE("Raw Dump channel cannot be created");
2898 pthread_mutex_unlock(&mMutex);
2899 return -ENOMEM;
2900 }
2901 }
2902
Thierry Strudel3d639192016-09-09 11:52:26 -07002903 if (mAnalysisChannel) {
2904 cam_analysis_info_t analysisInfo;
2905 memset(&analysisInfo, 0, sizeof(cam_analysis_info_t));
2906 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2907 CAM_STREAM_TYPE_ANALYSIS;
2908 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2909 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002910 rc = mCommon.getAnalysisInfo(FALSE,
Thierry Strudel3d639192016-09-09 11:52:26 -07002911 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2912 &analysisInfo);
2913 if (rc != NO_ERROR) {
2914 LOGE("getAnalysisInfo failed, ret = %d", rc);
2915 pthread_mutex_unlock(&mMutex);
2916 return rc;
2917 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -07002918 cam_color_filter_arrangement_t analysis_color_arrangement =
2919 (analysisInfo.analysis_format == CAM_FORMAT_Y_ONLY ?
2920 CAM_FILTER_ARRANGEMENT_Y :
2921 gCamCapability[mCameraId]->color_arrangement);
2922 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2923 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2924 analysis_color_arrangement);
2925
Thierry Strudel3d639192016-09-09 11:52:26 -07002926 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002927 mCommon.getMatchingDimension(previewSize,
2928 analysisInfo.analysis_recommended_res);
Thierry Strudel3d639192016-09-09 11:52:26 -07002929 mStreamConfigInfo.num_streams++;
2930 }
2931
Thierry Strudel2896d122017-02-23 19:18:03 -08002932 if (!onlyRaw && isSupportChannelNeeded(streamList, mStreamConfigInfo)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07002933 cam_analysis_info_t supportInfo;
2934 memset(&supportInfo, 0, sizeof(cam_analysis_info_t));
2935 cam_feature_mask_t callbackFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2936 setPAAFSupport(callbackFeatureMask,
2937 CAM_STREAM_TYPE_CALLBACK,
2938 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002939 int32_t ret = NO_ERROR;
Thierry Strudel9ec39c62016-12-28 11:30:05 -08002940 ret = mCommon.getAnalysisInfo(FALSE, callbackFeatureMask, &supportInfo);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07002941 if (ret != NO_ERROR) {
2942 /* Ignore the error for Mono camera
2943 * because the PAAF bit mask is only set
2944 * for CAM_STREAM_TYPE_ANALYSIS stream type
2945 */
2946 if (gCamCapability[mCameraId]->color_arrangement != CAM_FILTER_ARRANGEMENT_Y) {
2947 LOGW("getAnalysisInfo failed, ret = %d", ret);
2948 }
Thierry Strudel3d639192016-09-09 11:52:26 -07002949 }
2950 mSupportChannel = new QCamera3SupportChannel(
2951 mCameraHandle->camera_handle,
2952 mChannelHandle,
2953 mCameraHandle->ops,
2954 &gCamCapability[mCameraId]->padding_info,
2955 callbackFeatureMask,
2956 CAM_STREAM_TYPE_CALLBACK,
2957 &QCamera3SupportChannel::kDim,
2958 CAM_FORMAT_YUV_420_NV21,
2959 supportInfo.hw_analysis_supported,
2960 gCamCapability[mCameraId]->color_arrangement,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08002961 this, 0);
Thierry Strudel3d639192016-09-09 11:52:26 -07002962 if (!mSupportChannel) {
2963 LOGE("dummy channel cannot be created");
2964 pthread_mutex_unlock(&mMutex);
2965 return -ENOMEM;
2966 }
2967 }
2968
2969 if (mSupportChannel) {
2970 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2971 QCamera3SupportChannel::kDim;
2972 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2973 CAM_STREAM_TYPE_CALLBACK;
2974 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2975 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
2976 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2977 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2978 gCamCapability[mCameraId]->color_arrangement);
2979 mStreamConfigInfo.num_streams++;
2980 }
2981
2982 if (mRawDumpChannel) {
2983 cam_dimension_t rawSize;
2984 rawSize = getMaxRawSize(mCameraId);
2985 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] =
2986 rawSize;
2987 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
2988 CAM_STREAM_TYPE_RAW;
2989 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
2990 CAM_QCOM_FEATURE_NONE;
2991 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
2992 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
2993 gCamCapability[mCameraId]->color_arrangement);
2994 mStreamConfigInfo.num_streams++;
2995 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07002996
2997 if (mHdrPlusRawSrcChannel) {
2998 cam_dimension_t rawSize;
2999 rawSize = getMaxRawSize(mCameraId);
3000 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams] = rawSize;
3001 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] = CAM_STREAM_TYPE_RAW;
3002 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] = CAM_QCOM_FEATURE_NONE;
3003 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3004 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3005 gCamCapability[mCameraId]->color_arrangement);
3006 mStreamConfigInfo.num_streams++;
3007 }
3008
Thierry Strudel3d639192016-09-09 11:52:26 -07003009    /* In HFR mode, if no video stream is added, create a dummy channel so that
3010     * the ISP can run in batch mode even for the preview-only case. This channel is
3011 * never 'start'ed (no stream-on), it is only 'initialized' */
3012 if ((mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
3013 !m_bIsVideo) {
3014 cam_feature_mask_t dummyFeatureMask = CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3015 setPAAFSupport(dummyFeatureMask,
3016 CAM_STREAM_TYPE_VIDEO,
3017 gCamCapability[mCameraId]->color_arrangement);
3018 mDummyBatchChannel = new QCamera3RegularChannel(mCameraHandle->camera_handle,
3019 mChannelHandle,
3020 mCameraHandle->ops, captureResultCb,
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003021 setBufferErrorStatus, &gCamCapability[mCameraId]->padding_info,
Thierry Strudel3d639192016-09-09 11:52:26 -07003022 this,
3023 &mDummyBatchStream,
3024 CAM_STREAM_TYPE_VIDEO,
3025 dummyFeatureMask,
3026 mMetadataChannel);
3027 if (NULL == mDummyBatchChannel) {
3028            LOGE("creation of mDummyBatchChannel failed. "
3029                    "Preview will use non-HFR sensor mode");
3030 }
3031 }
3032 if (mDummyBatchChannel) {
3033 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].width =
3034 mDummyBatchStream.width;
3035 mStreamConfigInfo.stream_sizes[mStreamConfigInfo.num_streams].height =
3036 mDummyBatchStream.height;
3037 mStreamConfigInfo.type[mStreamConfigInfo.num_streams] =
3038 CAM_STREAM_TYPE_VIDEO;
3039 mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams] =
3040 CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
3041 setPAAFSupport(mStreamConfigInfo.postprocess_mask[mStreamConfigInfo.num_streams],
3042 mStreamConfigInfo.type[mStreamConfigInfo.num_streams],
3043 gCamCapability[mCameraId]->color_arrangement);
3044 mStreamConfigInfo.num_streams++;
3045 }
3046
3047 mStreamConfigInfo.buffer_info.min_buffers = MIN_INFLIGHT_REQUESTS;
3048 mStreamConfigInfo.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08003049 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07003050 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07003051
3052 /* Initialize mPendingRequestInfo and mPendingBuffersMap */
3053 for (pendingRequestIterator i = mPendingRequestsList.begin();
3054 i != mPendingRequestsList.end();) {
3055 i = erasePendingRequest(i);
3056 }
3057 mPendingFrameDropList.clear();
3058 // Initialize/Reset the pending buffers list
3059 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
3060 req.mPendingBufferList.clear();
3061 }
3062 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +01003063 mExpectedInflightDuration = 0;
3064 mExpectedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003065
Thierry Strudel3d639192016-09-09 11:52:26 -07003066 mCurJpegMeta.clear();
3067 //Get min frame duration for this streams configuration
3068 deriveMinFrameDuration();
3069
Chien-Yu Chenee335912017-02-09 17:53:20 -08003070 mFirstPreviewIntentSeen = false;
3071
Thierry Strudel3d639192016-09-09 11:52:26 -07003072 // Update state
3073 mState = CONFIGURED;
3074
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003075 mFirstMetadataCallback = true;
3076
Thierry Strudel3d639192016-09-09 11:52:26 -07003077 pthread_mutex_unlock(&mMutex);
3078
3079 return rc;
3080}
3081
3082/*===========================================================================
3083 * FUNCTION : validateCaptureRequest
3084 *
3085 * DESCRIPTION: validate a capture request from camera service
3086 *
3087 * PARAMETERS :
3088 * @request : request from framework to process
3089 *
3090 * RETURN     : NO_ERROR on success; BAD_VALUE if the request is malformed
3091 *
3092 *==========================================================================*/
3093int QCamera3HardwareInterface::validateCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003094 camera3_capture_request_t *request,
3095 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07003096{
3097 ssize_t idx = 0;
3098 const camera3_stream_buffer_t *b;
3099 CameraMetadata meta;
3100
3101 /* Sanity check the request */
3102 if (request == NULL) {
3103 LOGE("NULL capture request");
3104 return BAD_VALUE;
3105 }
3106
3107 if ((request->settings == NULL) && (mState == CONFIGURED)) {
3108 /*settings cannot be null for the first request*/
3109 return BAD_VALUE;
3110 }
3111
3112 uint32_t frameNumber = request->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003113 if ((request->num_output_buffers < 1 || request->output_buffers == NULL)
3114 && (internallyRequestedStreams.size() == 0)) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003115        LOGE("%s: Request %d: No output buffers provided!",
3116                __FUNCTION__, frameNumber);
3117 return BAD_VALUE;
3118 }
3119 if (request->num_output_buffers >= MAX_NUM_STREAMS) {
3120        LOGE("Number of buffers %d equals or exceeds the maximum number of streams %d!",
3121                request->num_output_buffers, MAX_NUM_STREAMS);
3122 return BAD_VALUE;
3123 }
3124 if (request->input_buffer != NULL) {
3125 b = request->input_buffer;
3126 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3127 LOGE("Request %d: Buffer %ld: Status not OK!",
3128 frameNumber, (long)idx);
3129 return BAD_VALUE;
3130 }
3131 if (b->release_fence != -1) {
3132 LOGE("Request %d: Buffer %ld: Has a release fence!",
3133 frameNumber, (long)idx);
3134 return BAD_VALUE;
3135 }
3136 if (b->buffer == NULL) {
3137 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3138 frameNumber, (long)idx);
3139 return BAD_VALUE;
3140 }
3141 }
3142
3143 // Validate all buffers
3144 b = request->output_buffers;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003145 if (b == NULL) {
3146 return BAD_VALUE;
3147 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003148 while (idx < (ssize_t)request->num_output_buffers) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003149 QCamera3ProcessingChannel *channel =
3150 static_cast<QCamera3ProcessingChannel*>(b->stream->priv);
3151 if (channel == NULL) {
3152 LOGE("Request %d: Buffer %ld: Unconfigured stream!",
3153 frameNumber, (long)idx);
3154 return BAD_VALUE;
3155 }
3156 if (b->status != CAMERA3_BUFFER_STATUS_OK) {
3157 LOGE("Request %d: Buffer %ld: Status not OK!",
3158 frameNumber, (long)idx);
3159 return BAD_VALUE;
3160 }
3161 if (b->release_fence != -1) {
3162 LOGE("Request %d: Buffer %ld: Has a release fence!",
3163 frameNumber, (long)idx);
3164 return BAD_VALUE;
3165 }
3166 if (b->buffer == NULL) {
3167 LOGE("Request %d: Buffer %ld: NULL buffer handle!",
3168 frameNumber, (long)idx);
3169 return BAD_VALUE;
3170 }
3171 if (*(b->buffer) == NULL) {
3172 LOGE("Request %d: Buffer %ld: NULL private handle!",
3173 frameNumber, (long)idx);
3174 return BAD_VALUE;
3175 }
3176 idx++;
3177 b = request->output_buffers + idx;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003178 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003179 return NO_ERROR;
3180}
3181
3182/*===========================================================================
3183 * FUNCTION : deriveMinFrameDuration
3184 *
3185 * DESCRIPTION: derive minimum processed, jpeg, and raw frame durations based
3186 * on currently configured streams.
3187 *
3188 * PARAMETERS : NONE
3189 *
3190 * RETURN : NONE
3191 *
3192 *==========================================================================*/
3193void QCamera3HardwareInterface::deriveMinFrameDuration()
3194{
3195 int32_t maxJpegDim, maxProcessedDim, maxRawDim;
Jason Lee2d0ab112017-06-21 18:03:05 -07003196 bool hasRaw = false;
3197
3198 mMinRawFrameDuration = 0;
3199 mMinJpegFrameDuration = 0;
3200 mMinProcessedFrameDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07003201
3202 maxJpegDim = 0;
3203 maxProcessedDim = 0;
3204 maxRawDim = 0;
3205
3206 // Figure out maximum jpeg, processed, and raw dimensions
3207 for (List<stream_info_t*>::iterator it = mStreamInfo.begin();
3208 it != mStreamInfo.end(); it++) {
3209
3210 // Input stream doesn't have valid stream_type
3211 if ((*it)->stream->stream_type == CAMERA3_STREAM_INPUT)
3212 continue;
3213
3214 int32_t dimension = (int32_t)((*it)->stream->width * (*it)->stream->height);
3215 if ((*it)->stream->format == HAL_PIXEL_FORMAT_BLOB) {
3216 if (dimension > maxJpegDim)
3217 maxJpegDim = dimension;
3218 } else if ((*it)->stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3219 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3220 (*it)->stream->format == HAL_PIXEL_FORMAT_RAW16) {
Jason Lee2d0ab112017-06-21 18:03:05 -07003221 hasRaw = true;
Thierry Strudel3d639192016-09-09 11:52:26 -07003222 if (dimension > maxRawDim)
3223 maxRawDim = dimension;
3224 } else {
3225 if (dimension > maxProcessedDim)
3226 maxProcessedDim = dimension;
3227 }
3228 }
3229
3230 size_t count = MIN(gCamCapability[mCameraId]->supported_raw_dim_cnt,
3231 MAX_SIZES_CNT);
3232
3233 //Assume all jpeg dimensions are in processed dimensions.
3234 if (maxJpegDim > maxProcessedDim)
3235 maxProcessedDim = maxJpegDim;
3236    //Find the smallest raw dimension that is greater than or equal to the jpeg dimension
Jason Lee2d0ab112017-06-21 18:03:05 -07003237 if (hasRaw && maxProcessedDim > maxRawDim) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003238 maxRawDim = INT32_MAX;
3239
3240 for (size_t i = 0; i < count; i++) {
3241 int32_t dimension = gCamCapability[mCameraId]->raw_dim[i].width *
3242 gCamCapability[mCameraId]->raw_dim[i].height;
3243 if (dimension >= maxProcessedDim && dimension < maxRawDim)
3244 maxRawDim = dimension;
3245 }
3246 }
3247
3248 //Find minimum durations for processed, jpeg, and raw
3249 for (size_t i = 0; i < count; i++) {
3250 if (maxRawDim == gCamCapability[mCameraId]->raw_dim[i].width *
3251 gCamCapability[mCameraId]->raw_dim[i].height) {
3252 mMinRawFrameDuration = gCamCapability[mCameraId]->raw_min_duration[i];
3253 break;
3254 }
3255 }
3256 count = MIN(gCamCapability[mCameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
3257 for (size_t i = 0; i < count; i++) {
3258 if (maxProcessedDim ==
3259 gCamCapability[mCameraId]->picture_sizes_tbl[i].width *
3260 gCamCapability[mCameraId]->picture_sizes_tbl[i].height) {
3261 mMinProcessedFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3262 mMinJpegFrameDuration = gCamCapability[mCameraId]->picture_min_duration[i];
3263 break;
3264 }
3265 }
3266}
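/* Illustrative walk-through with hypothetical streams (not taken from a real
 * configuration): given a 1920x1080 YUV stream plus a 4032x3024 BLOB stream and
 * no RAW stream, maxProcessedDim ends up as 4032*3024 (JPEG dimensions are folded
 * into processed), hasRaw stays false so mMinRawFrameDuration remains 0, and both
 * mMinProcessedFrameDuration and mMinJpegFrameDuration are read from the
 * picture_sizes_tbl entry matching 4032x3024. */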
3267
3268/*===========================================================================
3269 * FUNCTION : getMinFrameDuration
3270 *
3271 * DESCRIPTION: get the minimum frame duration based on the currently configured
3272 *              streams and the current request.
3273 *
3274 * PARAMETERS : @request: request sent by the framework
3275 *
3276 * RETURN : minimum frame duration for a particular request
3277 *
3278 *==========================================================================*/
3279int64_t QCamera3HardwareInterface::getMinFrameDuration(const camera3_capture_request_t *request)
3280{
3281 bool hasJpegStream = false;
3282 bool hasRawStream = false;
3283 for (uint32_t i = 0; i < request->num_output_buffers; i ++) {
3284 const camera3_stream_t *stream = request->output_buffers[i].stream;
3285 if (stream->format == HAL_PIXEL_FORMAT_BLOB)
3286 hasJpegStream = true;
3287 else if (stream->format == HAL_PIXEL_FORMAT_RAW_OPAQUE ||
3288 stream->format == HAL_PIXEL_FORMAT_RAW10 ||
3289 stream->format == HAL_PIXEL_FORMAT_RAW16)
3290 hasRawStream = true;
3291 }
3292
3293 if (!hasJpegStream)
3294 return MAX(mMinRawFrameDuration, mMinProcessedFrameDuration);
3295 else
3296 return MAX(MAX(mMinRawFrameDuration, mMinProcessedFrameDuration), mMinJpegFrameDuration);
3297}
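/* Example with hypothetical durations: if mMinRawFrameDuration = 50ms,
 * mMinProcessedFrameDuration = 33ms and mMinJpegFrameDuration = 100ms, a request
 * without a BLOB buffer is bounded by MAX(50, 33) = 50ms, while a request that
 * also includes a BLOB (JPEG) buffer is bounded by MAX(50, 33, 100) = 100ms. */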
3298
3299/*===========================================================================
3300 * FUNCTION : handleBuffersDuringFlushLock
3301 *
3302 * DESCRIPTION: Account for buffers returned from back-end during flush
3303 * This function is executed while mMutex is held by the caller.
3304 *
3305 * PARAMETERS :
3306 * @buffer: image buffer for the callback
3307 *
3308 * RETURN :
3309 *==========================================================================*/
3310void QCamera3HardwareInterface::handleBuffersDuringFlushLock(camera3_stream_buffer_t *buffer)
3311{
3312 bool buffer_found = false;
3313 for (List<PendingBuffersInRequest>::iterator req =
3314 mPendingBuffersMap.mPendingBuffersInRequest.begin();
3315 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
3316 for (List<PendingBufferInfo>::iterator i =
3317 req->mPendingBufferList.begin();
3318 i != req->mPendingBufferList.end(); i++) {
3319 if (i->buffer == buffer->buffer) {
3320 mPendingBuffersMap.numPendingBufsAtFlush--;
3321 LOGD("Found buffer %p for Frame %d, numPendingBufsAtFlush = %d",
3322 buffer->buffer, req->frame_number,
3323 mPendingBuffersMap.numPendingBufsAtFlush);
3324 buffer_found = true;
3325 break;
3326 }
3327 }
3328 if (buffer_found) {
3329 break;
3330 }
3331 }
3332 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
3333 //signal the flush()
3334 LOGD("All buffers returned to HAL. Continue flush");
3335 pthread_cond_signal(&mBuffersCond);
3336 }
3337}
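/* Expected flow (a sketch inferred from the signalling above, not spelled out
 * here): flush() is assumed to snapshot the number of outstanding buffers into
 * numPendingBufsAtFlush and then wait on mBuffersCond; every buffer returned by
 * the backend during the flush funnels through this function, and the one that
 * drives the counter to zero wakes the waiting flush() thread. */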
3338
Thierry Strudel3d639192016-09-09 11:52:26 -07003339/*===========================================================================
3340 * FUNCTION : handleBatchMetadata
3341 *
3342 * DESCRIPTION: Handles metadata buffer callback in batch mode
3343 *
3344 * PARAMETERS : @metadata_buf: metadata buffer
3345 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3346 * the meta buf in this method
3347 *
3348 * RETURN :
3349 *
3350 *==========================================================================*/
3351void QCamera3HardwareInterface::handleBatchMetadata(
3352 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf)
3353{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003354 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BATCH_METADATA);
Thierry Strudel3d639192016-09-09 11:52:26 -07003355
3356 if (NULL == metadata_buf) {
3357 LOGE("metadata_buf is NULL");
3358 return;
3359 }
3360    /* In batch mode, the metadata will contain the frame number and timestamp of
3361     * the last frame in the batch. E.g. a batch containing buffers from requests
3362     * 5, 6, 7 and 8 will have the frame number and timestamp corresponding to 8.
3363     * multiple process_capture_requests => 1 set_param => 1 handleBatchMetadata =>
3364 * multiple process_capture_results */
3365 metadata_buffer_t *metadata =
3366 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3367 int32_t frame_number_valid = 0, urgent_frame_number_valid = 0;
3368 uint32_t last_frame_number = 0, last_urgent_frame_number = 0;
3369 uint32_t first_frame_number = 0, first_urgent_frame_number = 0;
3370 uint32_t frame_number = 0, urgent_frame_number = 0;
3371 int64_t last_frame_capture_time = 0, first_frame_capture_time, capture_time;
3372 bool invalid_metadata = false;
3373 size_t urgentFrameNumDiff = 0, frameNumDiff = 0;
3374 size_t loopCount = 1;
Thierry Strudel54dc9782017-02-15 12:12:10 -08003375 bool is_metabuf_queued = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07003376
3377 int32_t *p_frame_number_valid =
3378 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3379 uint32_t *p_frame_number =
3380 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3381 int64_t *p_capture_time =
3382 POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
3383 int32_t *p_urgent_frame_number_valid =
3384 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3385 uint32_t *p_urgent_frame_number =
3386 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3387
3388 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) ||
3389 (NULL == p_capture_time) || (NULL == p_urgent_frame_number_valid) ||
3390 (NULL == p_urgent_frame_number)) {
3391 LOGE("Invalid metadata");
3392 invalid_metadata = true;
3393 } else {
3394 frame_number_valid = *p_frame_number_valid;
3395 last_frame_number = *p_frame_number;
3396 last_frame_capture_time = *p_capture_time;
3397 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3398 last_urgent_frame_number = *p_urgent_frame_number;
3399 }
3400
3401    /* In batch mode, when no video buffers are requested, set_parms are sent
3402 * for every capture_request. The difference between consecutive urgent
3403 * frame numbers and frame numbers should be used to interpolate the
3404 * corresponding frame numbers and time stamps */
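    /* Illustrative example with hypothetical numbers: if the batch metadata
     * reports last_frame_number = 8 and mPendingBatchMap maps it back to
     * first_frame_number = 5, then frameNumDiff = 4 and the loop below replays
     * the metadata as frames 5..8, spacing the inferred timestamps by
     * NSEC_PER_SEC / mHFRVideoFps. */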
3405 pthread_mutex_lock(&mMutex);
3406 if (urgent_frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003407 ssize_t idx = mPendingBatchMap.indexOfKey(last_urgent_frame_number);
3408 if(idx < 0) {
3409 LOGE("Invalid urgent frame number received: %d. Irrecoverable error",
3410 last_urgent_frame_number);
3411 mState = ERROR;
3412 pthread_mutex_unlock(&mMutex);
3413 return;
3414 }
3415 first_urgent_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003416 urgentFrameNumDiff = last_urgent_frame_number + 1 -
3417 first_urgent_frame_number;
3418
3419 LOGD("urgent_frm: valid: %d frm_num: %d - %d",
3420 urgent_frame_number_valid,
3421 first_urgent_frame_number, last_urgent_frame_number);
3422 }
3423
3424 if (frame_number_valid) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003425 ssize_t idx = mPendingBatchMap.indexOfKey(last_frame_number);
3426 if(idx < 0) {
3427 LOGE("Invalid frame number received: %d. Irrecoverable error",
3428 last_frame_number);
3429 mState = ERROR;
3430 pthread_mutex_unlock(&mMutex);
3431 return;
3432 }
3433 first_frame_number = mPendingBatchMap.valueAt(idx);
Thierry Strudel3d639192016-09-09 11:52:26 -07003434 frameNumDiff = last_frame_number + 1 -
3435 first_frame_number;
3436 mPendingBatchMap.removeItem(last_frame_number);
3437
3438 LOGD("frm: valid: %d frm_num: %d - %d",
3439 frame_number_valid,
3440 first_frame_number, last_frame_number);
3441
3442 }
3443 pthread_mutex_unlock(&mMutex);
3444
3445 if (urgent_frame_number_valid || frame_number_valid) {
3446 loopCount = MAX(urgentFrameNumDiff, frameNumDiff);
3447 if (urgentFrameNumDiff > MAX_HFR_BATCH_SIZE)
3448 LOGE("urgentFrameNumDiff: %d urgentFrameNum: %d",
3449 urgentFrameNumDiff, last_urgent_frame_number);
3450 if (frameNumDiff > MAX_HFR_BATCH_SIZE)
3451 LOGE("frameNumDiff: %d frameNum: %d",
3452 frameNumDiff, last_frame_number);
3453 }
3454
3455 for (size_t i = 0; i < loopCount; i++) {
3456 /* handleMetadataWithLock is called even for invalid_metadata for
3457 * pipeline depth calculation */
3458 if (!invalid_metadata) {
3459 /* Infer frame number. Batch metadata contains frame number of the
3460 * last frame */
3461 if (urgent_frame_number_valid) {
3462 if (i < urgentFrameNumDiff) {
3463 urgent_frame_number =
3464 first_urgent_frame_number + i;
3465 LOGD("inferred urgent frame_number: %d",
3466 urgent_frame_number);
3467 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3468 CAM_INTF_META_URGENT_FRAME_NUMBER, urgent_frame_number);
3469 } else {
3470 /* This is to handle when urgentFrameNumDiff < frameNumDiff */
3471 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3472 CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, 0);
3473 }
3474 }
3475
3476 /* Infer frame number. Batch metadata contains frame number of the
3477 * last frame */
3478 if (frame_number_valid) {
3479 if (i < frameNumDiff) {
3480 frame_number = first_frame_number + i;
3481 LOGD("inferred frame_number: %d", frame_number);
3482 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3483 CAM_INTF_META_FRAME_NUMBER, frame_number);
3484 } else {
3485 /* This is to handle when urgentFrameNumDiff > frameNumDiff */
3486 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3487 CAM_INTF_META_FRAME_NUMBER_VALID, 0);
3488 }
3489 }
3490
3491 if (last_frame_capture_time) {
3492 //Infer timestamp
3493 first_frame_capture_time = last_frame_capture_time -
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003494 (((loopCount - 1) * NSEC_PER_SEC) / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003495 capture_time =
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003496 first_frame_capture_time + (i * NSEC_PER_SEC / (double) mHFRVideoFps);
Thierry Strudel3d639192016-09-09 11:52:26 -07003497 ADD_SET_PARAM_ENTRY_TO_BATCH(metadata,
3498 CAM_INTF_META_SENSOR_TIMESTAMP, capture_time);
3499 LOGD("batch capture_time: %lld, capture_time: %lld",
3500 last_frame_capture_time, capture_time);
3501 }
3502 }
3503 pthread_mutex_lock(&mMutex);
3504 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003505 false /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003506 (i == urgentFrameNumDiff-1), /* last urgent metadata in the batch */
3507 (i == frameNumDiff-1), /* last metadata in the batch metadata */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003508                &is_metabuf_queued /* whether the meta buffer is queued or not */);
Thierry Strudel3d639192016-09-09 11:52:26 -07003509 pthread_mutex_unlock(&mMutex);
3510 }
3511
3512 /* BufDone metadata buffer */
Thierry Strudel54dc9782017-02-15 12:12:10 -08003513 if (free_and_bufdone_meta_buf && !is_metabuf_queued) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003514 mMetadataChannel->bufDone(metadata_buf);
3515 free(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003516 metadata_buf = NULL;
Thierry Strudel3d639192016-09-09 11:52:26 -07003517 }
3518}
3519
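/* notifyError: small helper that wraps a CAMERA3_MSG_ERROR notification for a
 * whole frame (for example CAMERA3_MSG_ERROR_REQUEST or CAMERA3_MSG_ERROR_RESULT,
 * depending on the caller) and forwards it through orchestrateNotify(); per-buffer
 * drops are instead reported inline with CAMERA3_MSG_ERROR_BUFFER in
 * handleMetadataWithLock(). */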
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003520void QCamera3HardwareInterface::notifyError(uint32_t frameNumber,
3521 camera3_error_msg_code_t errorCode)
3522{
3523 camera3_notify_msg_t notify_msg;
3524 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
3525 notify_msg.type = CAMERA3_MSG_ERROR;
3526 notify_msg.message.error.error_code = errorCode;
3527 notify_msg.message.error.error_stream = NULL;
3528 notify_msg.message.error.frame_number = frameNumber;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003529 orchestrateNotify(&notify_msg);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003530
3531 return;
3532}
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003533
3534/*===========================================================================
3535 * FUNCTION : sendPartialMetadataWithLock
3536 *
3537 * DESCRIPTION: Send partial capture result callback with mMutex lock held.
3538 *
3539 * PARAMETERS : @metadata: metadata buffer
3540 * @requestIter: The iterator for the pending capture request for
3541 * which the partial result is being sen
3542 *                which the partial result is being sent
3543 * last urgent metadata in a batch. Always true for non-batch mode
Shuzhen Wang485e2442017-08-02 12:21:08 -07003544 * @isJumpstartMetadata: Whether this is a partial metadata for
3545 * jumpstart, i.e. even though it doesn't map to a valid partial
3546 * frame number, its metadata entries should be kept.
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003547 *
3548 * RETURN :
3549 *
3550 *==========================================================================*/
3551
3552void QCamera3HardwareInterface::sendPartialMetadataWithLock(
3553 metadata_buffer_t *metadata,
3554 const pendingRequestIterator requestIter,
Shuzhen Wang485e2442017-08-02 12:21:08 -07003555 bool lastUrgentMetadataInBatch,
3556 bool isJumpstartMetadata)
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003557{
3558 camera3_capture_result_t result;
3559 memset(&result, 0, sizeof(camera3_capture_result_t));
3560
3561 requestIter->partial_result_cnt++;
3562
3563 // Extract 3A metadata
3564 result.result = translateCbUrgentMetadataToResultMetadata(
Shuzhen Wang485e2442017-08-02 12:21:08 -07003565 metadata, lastUrgentMetadataInBatch, requestIter->frame_number,
3566 isJumpstartMetadata);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003567 // Populate metadata result
3568 result.frame_number = requestIter->frame_number;
3569 result.num_output_buffers = 0;
3570 result.output_buffers = NULL;
3571 result.partial_result = requestIter->partial_result_cnt;
3572
3573 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07003574 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003575 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
3576 // Notify HDR+ client about the partial metadata.
3577 gHdrPlusClient->notifyFrameMetadata(result.frame_number, *result.result,
3578 result.partial_result == PARTIAL_RESULT_COUNT);
3579 }
3580 }
3581
3582 orchestrateResult(&result);
3583 LOGD("urgent frame_number = %u", result.frame_number);
3584 free_camera_metadata((camera_metadata_t *)result.result);
3585}
3586
Thierry Strudel3d639192016-09-09 11:52:26 -07003587/*===========================================================================
3588 * FUNCTION : handleMetadataWithLock
3589 *
3590 * DESCRIPTION: Handles metadata buffer callback with mMutex lock held.
3591 *
3592 * PARAMETERS : @metadata_buf: metadata buffer
3593 * @free_and_bufdone_meta_buf: Buf done on the meta buf and free
3594 * the meta buf in this method
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003595 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the
3596 * last urgent metadata in a batch. Always true for non-batch mode
3597 * @lastMetadataInBatch: Boolean to indicate whether this is the
3598 * last metadata in a batch. Always true for non-batch mode
Thierry Strudel54dc9782017-02-15 12:12:10 -08003599 * @p_is_metabuf_queued: Pointer to Boolean to check if metadata
3600 * buffer is enqueued or not.
Thierry Strudel3d639192016-09-09 11:52:26 -07003601 *
3602 * RETURN :
3603 *
3604 *==========================================================================*/
3605void QCamera3HardwareInterface::handleMetadataWithLock(
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003606 mm_camera_super_buf_t *metadata_buf, bool free_and_bufdone_meta_buf,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07003607 bool lastUrgentMetadataInBatch, bool lastMetadataInBatch,
3608 bool *p_is_metabuf_queued)
Thierry Strudel3d639192016-09-09 11:52:26 -07003609{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003610 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_METADATA_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07003611 if ((mFlushPerf) || (ERROR == mState) || (DEINIT == mState)) {
3612 //during flush do not send metadata from this thread
3613 LOGD("not sending metadata during flush or when mState is error");
3614 if (free_and_bufdone_meta_buf) {
3615 mMetadataChannel->bufDone(metadata_buf);
3616 free(metadata_buf);
3617 }
3618 return;
3619 }
3620
3621 //not in flush
3622 metadata_buffer_t *metadata = (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
3623 int32_t frame_number_valid, urgent_frame_number_valid;
3624 uint32_t frame_number, urgent_frame_number;
Jason Lee603176d2017-05-31 11:43:27 -07003625 int64_t capture_time, capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003626 nsecs_t currentSysTime;
3627
3628 int32_t *p_frame_number_valid =
3629 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
3630 uint32_t *p_frame_number = POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
3631 int64_t *p_capture_time = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP, metadata);
Jason Lee603176d2017-05-31 11:43:27 -07003632 int64_t *p_capture_time_av = POINTER_OF_META(CAM_INTF_META_SENSOR_TIMESTAMP_AV, metadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07003633 int32_t *p_urgent_frame_number_valid =
3634 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER_VALID, metadata);
3635 uint32_t *p_urgent_frame_number =
3636 POINTER_OF_META(CAM_INTF_META_URGENT_FRAME_NUMBER, metadata);
3637 IF_META_AVAILABLE(cam_stream_ID_t, p_cam_frame_drop, CAM_INTF_META_FRAME_DROPPED,
3638 metadata) {
3639 LOGD("Dropped frame info for frame_number_valid %d, frame_number %d",
3640 *p_frame_number_valid, *p_frame_number);
3641 }
3642
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003643 camera_metadata_t *resultMetadata = nullptr;
3644
Thierry Strudel3d639192016-09-09 11:52:26 -07003645 if ((NULL == p_frame_number_valid) || (NULL == p_frame_number) || (NULL == p_capture_time) ||
3646 (NULL == p_urgent_frame_number_valid) || (NULL == p_urgent_frame_number)) {
3647 LOGE("Invalid metadata");
3648 if (free_and_bufdone_meta_buf) {
3649 mMetadataChannel->bufDone(metadata_buf);
3650 free(metadata_buf);
3651 }
3652 goto done_metadata;
3653 }
3654 frame_number_valid = *p_frame_number_valid;
3655 frame_number = *p_frame_number;
3656 capture_time = *p_capture_time;
Jason Lee603176d2017-05-31 11:43:27 -07003657 capture_time_av = *p_capture_time_av;
Thierry Strudel3d639192016-09-09 11:52:26 -07003658 urgent_frame_number_valid = *p_urgent_frame_number_valid;
3659 urgent_frame_number = *p_urgent_frame_number;
3660 currentSysTime = systemTime(CLOCK_MONOTONIC);
3661
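    // When the sensor timestamp is not calibrated to the monotonic clock, it is
    // assumed to be on CLOCK_BOOTTIME; the loop below estimates the
    // BOOTTIME-to-MONOTONIC offset by bracketing one BOOTTIME read between two
    // MONOTONIC reads (best of three, keeping the attempt with the smallest
    // bracket) and subtracts that offset from capture_time.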
Jason Lee603176d2017-05-31 11:43:27 -07003662 if (!gCamCapability[mCameraId]->timestamp_calibrated) {
3663 const int tries = 3;
3664 nsecs_t bestGap, measured;
3665 for (int i = 0; i < tries; ++i) {
3666 const nsecs_t tmono = systemTime(SYSTEM_TIME_MONOTONIC);
3667 const nsecs_t tbase = systemTime(SYSTEM_TIME_BOOTTIME);
3668 const nsecs_t tmono2 = systemTime(SYSTEM_TIME_MONOTONIC);
3669 const nsecs_t gap = tmono2 - tmono;
3670 if (i == 0 || gap < bestGap) {
3671 bestGap = gap;
3672 measured = tbase - ((tmono + tmono2) >> 1);
3673 }
3674 }
3675 capture_time -= measured;
3676 }
3677
Thierry Strudel3d639192016-09-09 11:52:26 -07003678 // Detect if buffers from any requests are overdue
3679 for (auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003680 int64_t timeout;
3681 {
3682 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
3683 // If there is a pending HDR+ request, the following requests may be blocked until the
3684 // HDR+ request is done. So allow a longer timeout.
3685 timeout = (mHdrPlusPendingRequests.size() > 0) ?
3686 MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT : MISSING_REQUEST_BUF_TIMEOUT;
Emilian Peev30522a12017-08-03 14:36:33 +01003687 if (timeout < mExpectedInflightDuration) {
3688 timeout = mExpectedInflightDuration;
3689 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003690 }
3691
3692 if ( (currentSysTime - req.timestamp) > s2ns(timeout) ) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003693 for (auto &missed : req.mPendingBufferList) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003694 assert(missed.stream->priv);
3695 if (missed.stream->priv) {
3696 QCamera3Channel *ch = (QCamera3Channel *)(missed.stream->priv);
3697 assert(ch->mStreams[0]);
3698 if (ch->mStreams[0]) {
3699                        LOGE("Cancel missing frame = %d, buffer = %p, "
3700 "stream type = %d, stream format = %d",
3701 req.frame_number, missed.buffer,
3702 ch->mStreams[0]->getMyType(), missed.stream->format);
3703 ch->timeoutFrame(req.frame_number);
3704 }
3705 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003706 }
3707 }
3708 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003709    //For the very first metadata callback, regardless of whether it contains a valid
3710    //frame number, send the partial metadata for the jumpstarting requests.
3711    //Note that this has to be done even if the metadata doesn't contain a valid
3712    //urgent frame number, because in the case where only one request is ever
3713    //submitted to the HAL, there won't be a subsequent valid urgent frame number.
3714 if (mFirstMetadataCallback) {
3715 for (pendingRequestIterator i =
3716 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3717 if (i->bUseFirstPartial) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003718 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3719 true /*isJumpstartMetadata*/);
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003720 }
3721 }
3722 mFirstMetadataCallback = false;
3723 }
3724
Thierry Strudel3d639192016-09-09 11:52:26 -07003725 //Partial result on process_capture_result for timestamp
3726 if (urgent_frame_number_valid) {
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003727 LOGD("valid urgent frame_number = %u", urgent_frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003728
3729        //Received an urgent frame number, handle it
3730 //using partial results
3731 for (pendingRequestIterator i =
3732 mPendingRequestsList.begin(); i != mPendingRequestsList.end(); i++) {
3733 LOGD("Iterator Frame = %d urgent frame = %d",
3734 i->frame_number, urgent_frame_number);
3735
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -07003736 if ((!i->input_buffer) && (!i->hdrplus) && (i->frame_number < urgent_frame_number) &&
Thierry Strudel3d639192016-09-09 11:52:26 -07003737 (i->partial_result_cnt == 0)) {
3738 LOGE("Error: HAL missed urgent metadata for frame number %d",
3739 i->frame_number);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07003740 i->partial_result_cnt++;
Thierry Strudel3d639192016-09-09 11:52:26 -07003741 }
3742
3743 if (i->frame_number == urgent_frame_number &&
Shuzhen Wang3c077d72017-04-20 22:48:59 -07003744 i->partial_result_cnt == 0) {
Shuzhen Wang485e2442017-08-02 12:21:08 -07003745 sendPartialMetadataWithLock(metadata, i, lastUrgentMetadataInBatch,
3746 false /*isJumpstartMetadata*/);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003747 if (mResetInstantAEC && mInstantAECSettledFrameNumber == 0) {
3748 // Instant AEC settled for this frame.
3749 LOGH("instant AEC settled for frame number %d", urgent_frame_number);
3750 mInstantAECSettledFrameNumber = urgent_frame_number;
3751 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003752 break;
3753 }
3754 }
3755 }
3756
3757 if (!frame_number_valid) {
3758 LOGD("Not a valid normal frame number, used as SOF only");
3759 if (free_and_bufdone_meta_buf) {
3760 mMetadataChannel->bufDone(metadata_buf);
3761 free(metadata_buf);
3762 }
3763 goto done_metadata;
3764 }
3765 LOGH("valid frame_number = %u, capture_time = %lld",
3766 frame_number, capture_time);
3767
Emilian Peev4e0fe952017-06-30 12:40:09 -07003768 handleDepthDataLocked(metadata->depth_data, frame_number,
3769 metadata->is_depth_data_valid);
Emilian Peev7650c122017-01-19 08:24:33 -08003770
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003771 // Check whether any stream buffer corresponding to this is dropped or not
3772 // If dropped, then send the ERROR_BUFFER for the corresponding stream
3773    // OR, if instant AEC is enabled, drop frames until AEC has settled.
3774 for (auto & pendingRequest : mPendingRequestsList) {
3775 if (p_cam_frame_drop || (mInstantAEC || pendingRequest.frame_number <
3776 mInstantAECSettledFrameNumber)) {
3777 camera3_notify_msg_t notify_msg = {};
3778 for (auto & buffer : pendingRequest.buffers) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003779 bool dropFrame = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003780 QCamera3ProcessingChannel *channel =
3781 (QCamera3ProcessingChannel *)buffer.stream->priv;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003782 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003783 if (p_cam_frame_drop) {
3784 for (uint32_t k = 0; k < p_cam_frame_drop->num_streams; k++) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08003785 if (streamID == p_cam_frame_drop->stream_request[k].streamID) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003786 // Got the stream ID for drop frame.
3787 dropFrame = true;
3788 break;
3789 }
3790 }
3791 } else {
3792 // This is instant AEC case.
3793                    // For instant AEC, drop the stream until AEC is settled.
3794 dropFrame = true;
3795 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003796
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003797 if (dropFrame) {
3798 // Send Error notify to frameworks with CAMERA3_MSG_ERROR_BUFFER
3799 if (p_cam_frame_drop) {
3800 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003801 LOGE("Start of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003802 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003803 } else {
3804 // For instant AEC, inform frame drop and frame number
3805 LOGH("Start of reporting error frame#=%u for instant AEC, streamID=%u, "
3806 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003807 pendingRequest.frame_number, streamID,
3808 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003809 }
3810 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003811 notify_msg.message.error.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003812 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER ;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003813 notify_msg.message.error.error_stream = buffer.stream;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003814 orchestrateNotify(&notify_msg);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003815 if (p_cam_frame_drop) {
3816 // Treat msg as error for system buffer drops
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003817 LOGE("End of reporting error frame#=%u, streamID=%u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003818 pendingRequest.frame_number, streamID);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003819 } else {
3820 // For instant AEC, inform frame drop and frame number
3821 LOGH("End of reporting error frame#=%u for instant AEC, streamID=%u, "
3822 "AEC settled frame number = %u",
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003823 pendingRequest.frame_number, streamID,
3824 mInstantAECSettledFrameNumber);
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003825 }
3826 PendingFrameDropInfo PendingFrameDrop;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003827 PendingFrameDrop.frame_number = pendingRequest.frame_number;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07003828 PendingFrameDrop.stream_ID = streamID;
3829 // Add the Frame drop info to mPendingFrameDropList
3830 mPendingFrameDropList.push_back(PendingFrameDrop);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003831 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003832 }
3833 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003834 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003835
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003836 for (auto & pendingRequest : mPendingRequestsList) {
3837 // Find the pending request with the frame number.
3838 if (pendingRequest.frame_number == frame_number) {
3839 // Update the sensor timestamp.
3840 pendingRequest.timestamp = capture_time;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07003841
Thierry Strudel3d639192016-09-09 11:52:26 -07003842
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003843 /* Set the timestamp in the display metadata so that clients that are aware of
 3844 the private_handle, such as VT, can use these unmodified timestamps.
 3845 The camera framework is unaware of this timestamp and cannot change it. */
Jason Lee603176d2017-05-31 11:43:27 -07003846 updateTimeStampInPendingBuffers(pendingRequest.frame_number, capture_time_av);
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07003847
Thierry Strudel3d639192016-09-09 11:52:26 -07003848 // Find the channel requiring metadata, meaning internal offline postprocessing
 3849 // is needed.
 3850 //TODO: for now, we don't support two streams requiring metadata at the same time
 3851 // (because we are not making copies, and the metadata buffer is not reference counted).
3852 bool internalPproc = false;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003853 for (pendingBufferIterator iter = pendingRequest.buffers.begin();
3854 iter != pendingRequest.buffers.end(); iter++) {
Thierry Strudel3d639192016-09-09 11:52:26 -07003855 if (iter->need_metadata) {
3856 internalPproc = true;
3857 QCamera3ProcessingChannel *channel =
3858 (QCamera3ProcessingChannel *)iter->stream->priv;
3859 channel->queueReprocMetadata(metadata_buf);
Thierry Strudel54dc9782017-02-15 12:12:10 -08003860 if(p_is_metabuf_queued != NULL) {
3861 *p_is_metabuf_queued = true;
3862 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003863 break;
3864 }
3865 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003866 for (auto itr = pendingRequest.internalRequestList.begin();
3867 itr != pendingRequest.internalRequestList.end(); itr++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08003868 if (itr->need_metadata) {
3869 internalPproc = true;
3870 QCamera3ProcessingChannel *channel =
3871 (QCamera3ProcessingChannel *)itr->stream->priv;
3872 channel->queueReprocMetadata(metadata_buf);
3873 break;
3874 }
3875 }
3876
Thierry Strudel54dc9782017-02-15 12:12:10 -08003877 saveExifParams(metadata);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003878
3879 bool *enableZsl = nullptr;
3880 if (gExposeEnableZslKey) {
3881 enableZsl = &pendingRequest.enableZsl;
3882 }
3883
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003884 resultMetadata = translateFromHalMetadata(metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07003885 pendingRequest, internalPproc,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07003886 lastMetadataInBatch, enableZsl);
Thierry Strudel3d639192016-09-09 11:52:26 -07003887
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003888 updateFpsInPreviewBuffer(metadata, pendingRequest.frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003889
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003890 if (pendingRequest.blob_request) {
3891 //Dump tuning metadata if enabled and available
3892 char prop[PROPERTY_VALUE_MAX];
3893 memset(prop, 0, sizeof(prop));
3894 property_get("persist.camera.dumpmetadata", prop, "0");
3895 int32_t enabled = atoi(prop);
3896 if (enabled && metadata->is_tuning_params_valid) {
3897 dumpMetadataToFile(metadata->tuning_params,
3898 mMetaFrameCount,
3899 enabled,
3900 "Snapshot",
3901 frame_number);
Thierry Strudel3d639192016-09-09 11:52:26 -07003902 }
3903 }
3904
3905 if (!internalPproc) {
3906 LOGD("couldn't find need_metadata for this metadata");
3907 // Return metadata buffer
3908 if (free_and_bufdone_meta_buf) {
3909 mMetadataChannel->bufDone(metadata_buf);
3910 free(metadata_buf);
3911 }
3912 }
Thierry Strudel3d639192016-09-09 11:52:26 -07003913
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003914 break;
Thierry Strudel3d639192016-09-09 11:52:26 -07003915 }
3916 }
3917
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003918 mShutterDispatcher.markShutterReady(frame_number, capture_time);
3919
3920 // Try to send out capture result metadata.
3921 handlePendingResultMetadataWithLock(frame_number, resultMetadata);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08003922 return;
3923
Thierry Strudel3d639192016-09-09 11:52:26 -07003924done_metadata:
3925 for (pendingRequestIterator i = mPendingRequestsList.begin();
3926 i != mPendingRequestsList.end() ;i++) {
3927 i->pipeline_depth++;
3928 }
3929 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
3930 unblockRequestIfNecessary();
3931}
3932
3933/*===========================================================================
Emilian Peev7650c122017-01-19 08:24:33 -08003934 * FUNCTION   : handleDepthDataLocked
3935 *
3936 * DESCRIPTION: Handles incoming depth data
3937 *
3938 * PARAMETERS : @depthData : Depth data
3939 * @frameNumber: Frame number of the incoming depth data
Emilian Peev4e0fe952017-06-30 12:40:09 -07003940 * @valid : Valid flag for the incoming data
Emilian Peev7650c122017-01-19 08:24:33 -08003941 *
3942 * RETURN :
3943 *
3944 *==========================================================================*/
3945void QCamera3HardwareInterface::handleDepthDataLocked(
Emilian Peev4e0fe952017-06-30 12:40:09 -07003946 const cam_depth_data_t &depthData, uint32_t frameNumber, uint8_t valid) {
Emilian Peev7650c122017-01-19 08:24:33 -08003947 uint32_t currentFrameNumber;
3948 buffer_handle_t *depthBuffer;
3949
3950 if (nullptr == mDepthChannel) {
Emilian Peev7650c122017-01-19 08:24:33 -08003951 return;
3952 }
3953
3954 camera3_stream_buffer_t resultBuffer =
3955 {.acquire_fence = -1,
3956 .release_fence = -1,
3957 .status = CAMERA3_BUFFER_STATUS_OK,
3958 .buffer = nullptr,
3959 .stream = mDepthChannel->getStream()};
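        // Drain depth buffers in frame order: buffers for frames older than 'frameNumber'
        // are returned as buffer errors, the matching frame is populated with the depth
        // data (or flagged as an error when 'valid' is false), and newer frames are left
        // queued for a later callback.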
Emilian Peev7650c122017-01-19 08:24:33 -08003960 do {
3961 depthBuffer = mDepthChannel->getOldestFrame(currentFrameNumber);
3962 if (nullptr == depthBuffer) {
3963 break;
3964 }
3965
Emilian Peev7650c122017-01-19 08:24:33 -08003966 resultBuffer.buffer = depthBuffer;
3967 if (currentFrameNumber == frameNumber) {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003968 if (valid) {
3969 int32_t rc = mDepthChannel->populateDepthData(depthData,
3970 frameNumber);
3971 if (NO_ERROR != rc) {
3972 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3973 } else {
3974 resultBuffer.status = CAMERA3_BUFFER_STATUS_OK;
3975 }
Emilian Peev7650c122017-01-19 08:24:33 -08003976 } else {
Emilian Peev4e0fe952017-06-30 12:40:09 -07003977 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
Emilian Peev7650c122017-01-19 08:24:33 -08003978 }
3979 } else if (currentFrameNumber > frameNumber) {
3980 break;
3981 } else {
3982 camera3_notify_msg_t notify_msg = {.type = CAMERA3_MSG_ERROR,
3983 {{currentFrameNumber, mDepthChannel->getStream(),
3984 CAMERA3_MSG_ERROR_BUFFER}}};
3985 orchestrateNotify(&notify_msg);
3986
3987 LOGE("Depth buffer for frame number: %d is missing "
3988 "returning back!", currentFrameNumber);
3989 resultBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
3990 }
3991 mDepthChannel->unmapBuffer(currentFrameNumber);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07003992 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08003993 } while (currentFrameNumber < frameNumber);
3994}
3995
3996/*===========================================================================
3997 * FUNCTION : notifyErrorFoPendingDepthData
3998 *
3999 * DESCRIPTION: Returns error for any pending depth buffers
4000 *
4001 * PARAMETERS : depthCh - depth channel that needs to get flushed
4002 *
4003 * RETURN :
4004 *
4005 *==========================================================================*/
4006void QCamera3HardwareInterface::notifyErrorFoPendingDepthData(
4007 QCamera3DepthChannel *depthCh) {
4008 uint32_t currentFrameNumber;
4009 buffer_handle_t *depthBuffer;
4010
4011 if (nullptr == depthCh) {
4012 return;
4013 }
4014
4015 camera3_notify_msg_t notify_msg =
4016 {.type = CAMERA3_MSG_ERROR,
4017 {{0, depthCh->getStream(), CAMERA3_MSG_ERROR_BUFFER}}};
4018 camera3_stream_buffer_t resultBuffer =
4019 {.acquire_fence = -1,
4020 .release_fence = -1,
4021 .buffer = nullptr,
4022 .stream = depthCh->getStream(),
4023 .status = CAMERA3_BUFFER_STATUS_ERROR};
Emilian Peev7650c122017-01-19 08:24:33 -08004024
4025 while (nullptr !=
4026 (depthBuffer = depthCh->getOldestFrame(currentFrameNumber))) {
4027 depthCh->unmapBuffer(currentFrameNumber);
4028
4029 notify_msg.message.error.frame_number = currentFrameNumber;
4030 orchestrateNotify(&notify_msg);
4031
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004032 mOutputBufferDispatcher.markBufferReady(currentFrameNumber, resultBuffer);
Emilian Peev7650c122017-01-19 08:24:33 -08004033    }
4034}
4035
4036/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07004037 * FUNCTION : hdrPlusPerfLock
4038 *
4039 * DESCRIPTION: perf lock for HDR+ using custom intent
4040 *
4041 * PARAMETERS : @metadata_buf: Metadata super_buf pointer
4042 *
4043 * RETURN : None
4044 *
4045 *==========================================================================*/
4046void QCamera3HardwareInterface::hdrPlusPerfLock(
4047 mm_camera_super_buf_t *metadata_buf)
4048{
4049 if (NULL == metadata_buf) {
4050 LOGE("metadata_buf is NULL");
4051 return;
4052 }
4053 metadata_buffer_t *metadata =
4054 (metadata_buffer_t *)metadata_buf->bufs[0]->buffer;
4055 int32_t *p_frame_number_valid =
4056 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER_VALID, metadata);
4057 uint32_t *p_frame_number =
4058 POINTER_OF_META(CAM_INTF_META_FRAME_NUMBER, metadata);
4059
4060 if (p_frame_number_valid == NULL || p_frame_number == NULL) {
4061 LOGE("%s: Invalid metadata", __func__);
4062 return;
4063 }
4064
Wei Wang01385482017-08-03 10:49:34 -07004065 //acquire perf lock for 2 secs after the last HDR frame is captured
4066 constexpr uint32_t HDR_PLUS_PERF_TIME_OUT = 2000;
Thierry Strudel3d639192016-09-09 11:52:26 -07004067 if ((p_frame_number_valid != NULL) && *p_frame_number_valid) {
4068 if ((p_frame_number != NULL) &&
4069 (mLastCustIntentFrmNum == (int32_t)*p_frame_number)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004070 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT, HDR_PLUS_PERF_TIME_OUT);
Thierry Strudel3d639192016-09-09 11:52:26 -07004071 }
4072 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004073}
4074
4075/*===========================================================================
4076 * FUNCTION : handleInputBufferWithLock
4077 *
4078 * DESCRIPTION: Handles input buffer and shutter callback with mMutex lock held.
4079 *
4080 * PARAMETERS : @frame_number: frame number of the input buffer
4081 *
4082 * RETURN :
4083 *
4084 *==========================================================================*/
4085void QCamera3HardwareInterface::handleInputBufferWithLock(uint32_t frame_number)
4086{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004087 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_IN_BUF_LKD);
Thierry Strudel3d639192016-09-09 11:52:26 -07004088 pendingRequestIterator i = mPendingRequestsList.begin();
4089 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4090 i++;
4091 }
4092 if (i != mPendingRequestsList.end() && i->input_buffer) {
4093 //found the right request
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004094 CameraMetadata settings;
4095 nsecs_t capture_time = systemTime(CLOCK_MONOTONIC);
4096 if(i->settings) {
4097 settings = i->settings;
4098 if (settings.exists(ANDROID_SENSOR_TIMESTAMP)) {
4099 capture_time = settings.find(ANDROID_SENSOR_TIMESTAMP).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -07004100 } else {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004101 LOGE("No timestamp in input settings! Using current one.");
Thierry Strudel3d639192016-09-09 11:52:26 -07004102 }
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004103 } else {
4104 LOGE("Input settings missing!");
Thierry Strudel3d639192016-09-09 11:52:26 -07004105 }
4106
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004107 mShutterDispatcher.markShutterReady(frame_number, capture_time);
4108 LOGD("Input request metadata notify frame_number = %u, capture_time = %llu",
4109 i->frame_number, capture_time);
Thierry Strudel3d639192016-09-09 11:52:26 -07004110
4111 camera3_capture_result result;
4112 memset(&result, 0, sizeof(camera3_capture_result));
4113 result.frame_number = frame_number;
4114 result.result = i->settings;
4115 result.input_buffer = i->input_buffer;
4116 result.partial_result = PARTIAL_RESULT_COUNT;
4117
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004118 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -07004119 LOGD("Input request metadata and input buffer frame_number = %u",
4120 i->frame_number);
4121 i = erasePendingRequest(i);
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004122
4123 // Dispatch result metadata that may be just unblocked by this reprocess result.
4124 dispatchResultMetadataWithLock(frame_number, /*isLiveRequest*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -07004125 } else {
4126 LOGE("Could not find input request for frame number %d", frame_number);
4127 }
4128}
4129
4130/*===========================================================================
4131 * FUNCTION : handleBufferWithLock
4132 *
4133 * DESCRIPTION: Handles image buffer callback with mMutex lock held.
4134 *
4135 * PARAMETERS : @buffer: image buffer for the callback
4136 * @frame_number: frame number of the image buffer
4137 *
4138 * RETURN :
4139 *
4140 *==========================================================================*/
4141void QCamera3HardwareInterface::handleBufferWithLock(
4142 camera3_stream_buffer_t *buffer, uint32_t frame_number)
4143{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004144 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_HANDLE_BUF_LKD);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004145
4146 if (buffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
4147 mPerfLockMgr.releasePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
4148 }
4149
Thierry Strudel3d639192016-09-09 11:52:26 -07004150 /* Nothing to be done during error state */
4151 if ((ERROR == mState) || (DEINIT == mState)) {
4152 return;
4153 }
4154 if (mFlushPerf) {
4155 handleBuffersDuringFlushLock(buffer);
4156 return;
4157 }
4158 //not in flush
4159 // If the frame number doesn't exist in the pending request list,
4160 // directly send the buffer to the frameworks, and update pending buffers map
4161 // Otherwise, book-keep the buffer.
4162 pendingRequestIterator i = mPendingRequestsList.begin();
4163 while (i != mPendingRequestsList.end() && i->frame_number != frame_number){
4164 i++;
4165 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004166
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004167 if (i != mPendingRequestsList.end()) {
Thierry Strudel3d639192016-09-09 11:52:26 -07004168 if (i->input_buffer) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004169 // For a reprocessing request, try to send out result metadata.
4170 handlePendingResultMetadataWithLock(frame_number, nullptr);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004171 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004172 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004173
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004174 // Check if this frame was dropped.
4175 for (List<PendingFrameDropInfo>::iterator m = mPendingFrameDropList.begin();
4176 m != mPendingFrameDropList.end(); m++) {
4177 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4178 uint32_t streamID = channel->getStreamID(channel->getStreamTypeMask());
4179 if((m->stream_ID == streamID) && (m->frame_number==frame_number) ) {
4180 buffer->status=CAMERA3_BUFFER_STATUS_ERROR;
4181 LOGD("Stream STATUS_ERROR frame_number=%d, streamID=%d",
4182 frame_number, streamID);
4183 m = mPendingFrameDropList.erase(m);
4184 break;
4185 }
4186 }
4187
Binhao Lin09245482017-08-31 18:25:29 -07004188 // WAR for encoder avtimer timestamp issue
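    // With the AV timer enabled, video buffers are expected to carry the AV timestamp
    // recorded for this request: if none was captured (av_timestamp == 0) the buffer is
    // flagged with an error status, otherwise the timestamp is written into the gralloc
    // private handle via SET_VT_TIMESTAMP for clients such as VT.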
4189 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4190 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask() &&
4191 m_bAVTimerEnabled) {
4192 for (auto req = mPendingBuffersMap.mPendingBuffersInRequest.begin();
4193 req != mPendingBuffersMap.mPendingBuffersInRequest.end(); req++) {
4194 if (req->frame_number != frame_number)
4195 continue;
4196 if(req->av_timestamp == 0) {
4197 buffer->status |= CAMERA3_BUFFER_STATUS_ERROR;
4198 }
4199 else {
4200 struct private_handle_t *priv_handle =
4201 (struct private_handle_t *) (*(buffer->buffer));
4202 setMetaData(priv_handle, SET_VT_TIMESTAMP, &(req->av_timestamp));
4203 }
4204 }
4205 }
4206
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004207 buffer->status |= mPendingBuffersMap.getBufErrStatus(buffer->buffer);
4208 LOGH("result frame_number = %d, buffer = %p",
4209 frame_number, buffer->buffer);
4210
4211 mPendingBuffersMap.removeBuf(buffer->buffer);
4212 mOutputBufferDispatcher.markBufferReady(frame_number, *buffer);
4213
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004214 if (mPreviewStarted == false) {
4215 QCamera3Channel *channel = (QCamera3Channel *)buffer->stream->priv;
4216 if ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask()) {
Chien-Yu Chen509314b2017-04-07 15:27:55 -07004217 logEaselEvent("EASEL_STARTUP_LATENCY", "Preview Started");
4218
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004219 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
4220 mPerfLockMgr.releasePerfLock(PERF_LOCK_OPEN_CAMERA);
4221 mPreviewStarted = true;
4222
4223 // Set power hint for preview
4224 mPerfLockMgr.acquirePerfLock(PERF_LOCK_POWERHINT_ENCODE, 0);
4225 }
4226 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004227}
4228
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004229void QCamera3HardwareInterface::handlePendingResultMetadataWithLock(uint32_t frameNumber,
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004230 camera_metadata_t *resultMetadata)
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004231{
4232 // Find the pending request for this result metadata.
4233 auto requestIter = mPendingRequestsList.begin();
4234 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != frameNumber) {
4235 requestIter++;
4236 }
4237
4238 if (requestIter == mPendingRequestsList.end()) {
4239 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, frameNumber);
4240 return;
4241 }
4242
4243 // Update the result metadata
4244 requestIter->resultMetadata = resultMetadata;
4245
4246 // Check what type of request this is.
4247 bool liveRequest = false;
4248 if (requestIter->hdrplus) {
Chien-Yu Chen9264fe92017-04-29 03:28:46 +00004249 // HDR+ request doesn't have partial results.
4250 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004251 } else if (requestIter->input_buffer != nullptr) {
4252 // Reprocessing request result is the same as settings.
4253 requestIter->resultMetadata = requestIter->settings;
4254 // Reprocessing request doesn't have partial results.
4255 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4256 } else {
4257 liveRequest = true;
Chien-Yu Chen0a921f92017-08-27 17:25:33 -07004258 requestIter->partial_result_cnt = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004259 mPendingLiveRequest--;
4260
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004261 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07004262 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07004263 // For a live request, send the metadata to HDR+ client.
4264 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
4265 gHdrPlusClient->notifyFrameMetadata(frameNumber, *resultMetadata,
4266 requestIter->partial_result_cnt == PARTIAL_RESULT_COUNT);
4267 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004268 }
4269 }
4270
Chien-Yu Chenbc730232017-07-12 14:49:55 -07004271    // Remove the lens shading map if it's not requested.
4272 if (requestIter->requestedLensShadingMapMode == ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF) {
4273 CameraMetadata metadata;
4274 metadata.acquire(resultMetadata);
4275 metadata.erase(ANDROID_STATISTICS_LENS_SHADING_MAP);
4276 metadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE,
4277 &requestIter->requestedLensShadingMapMode, 1);
4278
4279 requestIter->resultMetadata = metadata.release();
4280 }
4281
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004282 dispatchResultMetadataWithLock(frameNumber, liveRequest);
4283}
4284
4285void QCamera3HardwareInterface::dispatchResultMetadataWithLock(uint32_t frameNumber,
4286 bool isLiveRequest) {
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004287 // The pending requests are ordered by increasing frame numbers. The result metadata are ready
4288 // to be sent if all previous pending requests are ready to be sent.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004289 bool readyToSend = true;
4290
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004291 // Iterate through the pending requests to send out result metadata that are ready. Also if
4292 // this result metadata belongs to a live request, notify errors for previous live requests
4293 // that don't have result metadata yet.
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004294 auto iter = mPendingRequestsList.begin();
4295 while (iter != mPendingRequestsList.end()) {
4296 // Check if current pending request is ready. If it's not ready, the following pending
4297 // requests are also not ready.
4298 if (readyToSend && iter->resultMetadata == nullptr) {
4299 readyToSend = false;
4300 }
4301
4302 bool thisLiveRequest = iter->hdrplus == false && iter->input_buffer == nullptr;
4303
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004304 camera3_capture_result_t result = {};
4305 result.frame_number = iter->frame_number;
4306 result.result = iter->resultMetadata;
4307 result.partial_result = iter->partial_result_cnt;
4308
 4309        // If this pending request has result metadata, we may be able to send out the shutter
 4310        // callback and result metadata.
4311 if (iter->resultMetadata != nullptr) {
4312 if (!readyToSend) {
4313 // If any of the previous pending request is not ready, this pending request is
4314 // also not ready to send in order to keep shutter callbacks and result metadata
4315 // in order.
4316 iter++;
4317 continue;
4318 }
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004319 } else if (iter->frame_number < frameNumber && isLiveRequest && thisLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004320 // If the result metadata belongs to a live request, notify errors for previous pending
4321 // live requests.
4322 mPendingLiveRequest--;
4323
4324 CameraMetadata dummyMetadata;
4325 dummyMetadata.update(ANDROID_REQUEST_ID, &(iter->request_id), 1);
4326 result.result = dummyMetadata.release();
4327
4328 notifyError(iter->frame_number, CAMERA3_MSG_ERROR_RESULT);
Shuzhen Wang1ee712a2017-03-22 17:51:26 -07004329
 4330            // partial_result should be PARTIAL_RESULT_COUNT in case of
 4331            // ERROR_RESULT.
4332 iter->partial_result_cnt = PARTIAL_RESULT_COUNT;
4333 result.partial_result = PARTIAL_RESULT_COUNT;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004334 } else {
4335 iter++;
4336 continue;
4337 }
4338
Chien-Yu Chen3f303522017-05-19 15:21:45 -07004339 result.output_buffers = nullptr;
4340 result.num_output_buffers = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004341 orchestrateResult(&result);
4342
4343 // For reprocessing, result metadata is the same as settings so do not free it here to
4344 // avoid double free.
4345 if (result.result != iter->settings) {
4346 free_camera_metadata((camera_metadata_t *)result.result);
4347 }
4348 iter->resultMetadata = nullptr;
4349 iter = erasePendingRequest(iter);
4350 }
4351
Chien-Yu Chen588cc852017-06-23 18:39:51 -07004352 if (isLiveRequest) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004353 for (auto &iter : mPendingRequestsList) {
4354 // Increment pipeline depth for the following pending requests.
4355 if (iter.frame_number > frameNumber) {
4356 iter.pipeline_depth++;
4357 }
4358 }
4359 }
4360
4361 unblockRequestIfNecessary();
4362}
4363
Thierry Strudel3d639192016-09-09 11:52:26 -07004364/*===========================================================================
4365 * FUNCTION : unblockRequestIfNecessary
4366 *
4367 * DESCRIPTION: Unblock capture_request if max_buffer hasn't been reached. Note
4368 * that mMutex is held when this function is called.
4369 *
4370 * PARAMETERS :
4371 *
4372 * RETURN :
4373 *
4374 *==========================================================================*/
4375void QCamera3HardwareInterface::unblockRequestIfNecessary()
4376{
4377 // Unblock process_capture_request
4378 pthread_cond_signal(&mRequestCond);
4379}
4380
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004381/*===========================================================================
4382 * FUNCTION : isHdrSnapshotRequest
4383 *
4384 * DESCRIPTION: Function to determine if the request is for a HDR snapshot
4385 *
4386 * PARAMETERS : camera3 request structure
4387 *
4388 * RETURN : boolean decision variable
4389 *
4390 *==========================================================================*/
4391bool QCamera3HardwareInterface::isHdrSnapshotRequest(camera3_capture_request *request)
4392{
4393 if (request == NULL) {
4394 LOGE("Invalid request handle");
4395 assert(0);
4396 return false;
4397 }
4398
4399 if (!mForceHdrSnapshot) {
4400 CameraMetadata frame_settings;
4401 frame_settings = request->settings;
4402
4403 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
4404 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
4405 if (metaMode != ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
4406 return false;
4407 }
4408 } else {
4409 return false;
4410 }
4411
4412 if (frame_settings.exists(ANDROID_CONTROL_SCENE_MODE)) {
4413 uint8_t fwk_sceneMode = frame_settings.find(ANDROID_CONTROL_SCENE_MODE).data.u8[0];
4414 if (fwk_sceneMode != ANDROID_CONTROL_SCENE_MODE_HDR) {
4415 return false;
4416 }
4417 } else {
4418 return false;
4419 }
4420 }
4421
4422 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4423 if (request->output_buffers[i].stream->format
4424 == HAL_PIXEL_FORMAT_BLOB) {
4425 return true;
4426 }
4427 }
4428
4429 return false;
4430}
4431/*===========================================================================
4432 * FUNCTION : orchestrateRequest
4433 *
4434 * DESCRIPTION: Orchestrates a capture request from camera service
4435 *
4436 * PARAMETERS :
4437 * @request : request from framework to process
4438 *
4439 * RETURN : Error status codes
4440 *
4441 *==========================================================================*/
4442int32_t QCamera3HardwareInterface::orchestrateRequest(
4443 camera3_capture_request_t *request)
4444{
4445
4446 uint32_t originalFrameNumber = request->frame_number;
4447 uint32_t originalOutputCount = request->num_output_buffers;
4448 const camera_metadata_t *original_settings = request->settings;
4449 List<InternalRequest> internallyRequestedStreams;
4450 List<InternalRequest> emptyInternalList;
4451
4452 if (isHdrSnapshotRequest(request) && request->input_buffer == NULL) {
4453 LOGD("Framework requested:%d buffers in HDR snapshot", request->num_output_buffers);
4454 uint32_t internalFrameNumber;
4455 CameraMetadata modified_meta;
4456
4457
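        /* Rough flow of the HDR snapshot path: the single framework request is expanded
         * into a bracketed sequence of internal requests (metering/settling and capture
         * at -2x, 0 and +2x EV), tracked via _orchestrationDb so that only the original
         * framework frame number is reported back and internal results are dropped. */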
4458 /* Add Blob channel to list of internally requested streams */
4459 for (uint32_t i = 0; i < request->num_output_buffers; i++) {
4460 if (request->output_buffers[i].stream->format
4461 == HAL_PIXEL_FORMAT_BLOB) {
4462 InternalRequest streamRequested;
4463 streamRequested.meteringOnly = 1;
4464 streamRequested.need_metadata = 0;
4465 streamRequested.stream = request->output_buffers[i].stream;
4466 internallyRequestedStreams.push_back(streamRequested);
4467 }
4468 }
4469 request->num_output_buffers = 0;
4470 auto itr = internallyRequestedStreams.begin();
4471
4472 /* Modify setting to set compensation */
4473 modified_meta = request->settings;
4474 int32_t expCompensation = GB_HDR_HALF_STEP_EV;
4475 uint8_t aeLock = 1;
4476 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4477 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4478 camera_metadata_t *modified_settings = modified_meta.release();
4479 request->settings = modified_settings;
4480
4481 /* Capture Settling & -2x frame */
4482 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4483 request->frame_number = internalFrameNumber;
4484 processCaptureRequest(request, internallyRequestedStreams);
4485
4486 request->num_output_buffers = originalOutputCount;
4487 _orchestrationDb.allocStoreInternalFrameNumber(originalFrameNumber, internalFrameNumber);
4488 request->frame_number = internalFrameNumber;
4489 processCaptureRequest(request, emptyInternalList);
4490 request->num_output_buffers = 0;
4491
4492 modified_meta = modified_settings;
4493 expCompensation = 0;
4494 aeLock = 1;
4495 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4496 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4497 modified_settings = modified_meta.release();
4498 request->settings = modified_settings;
4499
4500 /* Capture Settling & 0X frame */
4501
4502 itr = internallyRequestedStreams.begin();
4503 if (itr == internallyRequestedStreams.end()) {
4504 LOGE("Error Internally Requested Stream list is empty");
4505 assert(0);
4506 } else {
4507 itr->need_metadata = 0;
4508 itr->meteringOnly = 1;
4509 }
4510
4511 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4512 request->frame_number = internalFrameNumber;
4513 processCaptureRequest(request, internallyRequestedStreams);
4514
4515 itr = internallyRequestedStreams.begin();
4516 if (itr == internallyRequestedStreams.end()) {
4517 ALOGE("Error Internally Requested Stream list is empty");
4518 assert(0);
4519 } else {
4520 itr->need_metadata = 1;
4521 itr->meteringOnly = 0;
4522 }
4523
4524 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4525 request->frame_number = internalFrameNumber;
4526 processCaptureRequest(request, internallyRequestedStreams);
4527
4528 /* Capture 2X frame*/
4529 modified_meta = modified_settings;
4530 expCompensation = GB_HDR_2X_STEP_EV;
4531 aeLock = 1;
4532 modified_meta.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, &expCompensation, 1);
4533 modified_meta.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
4534 modified_settings = modified_meta.release();
4535 request->settings = modified_settings;
4536
4537 itr = internallyRequestedStreams.begin();
4538 if (itr == internallyRequestedStreams.end()) {
4539 ALOGE("Error Internally Requested Stream list is empty");
4540 assert(0);
4541 } else {
4542 itr->need_metadata = 0;
4543 itr->meteringOnly = 1;
4544 }
4545 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4546 request->frame_number = internalFrameNumber;
4547 processCaptureRequest(request, internallyRequestedStreams);
4548
4549 itr = internallyRequestedStreams.begin();
4550 if (itr == internallyRequestedStreams.end()) {
4551 ALOGE("Error Internally Requested Stream list is empty");
4552 assert(0);
4553 } else {
4554 itr->need_metadata = 1;
4555 itr->meteringOnly = 0;
4556 }
4557
4558 _orchestrationDb.generateStoreInternalFrameNumber(internalFrameNumber);
4559 request->frame_number = internalFrameNumber;
4560 processCaptureRequest(request, internallyRequestedStreams);
4561
4562
4563 /* Capture 2X on original streaming config*/
4564 internallyRequestedStreams.clear();
4565
4566 /* Restore original settings pointer */
4567 request->settings = original_settings;
4568 } else {
4569 uint32_t internalFrameNumber;
4570 _orchestrationDb.allocStoreInternalFrameNumber(request->frame_number, internalFrameNumber);
4571 request->frame_number = internalFrameNumber;
4572 return processCaptureRequest(request, internallyRequestedStreams);
4573 }
4574
4575 return NO_ERROR;
4576}
4577
4578/*===========================================================================
4579 * FUNCTION : orchestrateResult
4580 *
4581 * DESCRIPTION: Orchestrates a capture result to camera service
4582 *
4583 * PARAMETERS :
 4584 *   @result : capture result to send to the framework
4585 *
4586 * RETURN :
4587 *
4588 *==========================================================================*/
4589void QCamera3HardwareInterface::orchestrateResult(
4590 camera3_capture_result_t *result)
4591{
4592 uint32_t frameworkFrameNumber;
4593 int32_t rc = _orchestrationDb.getFrameworkFrameNumber(result->frame_number,
4594 frameworkFrameNumber);
4595 if (rc != NO_ERROR) {
4596 LOGE("Cannot find translated frameworkFrameNumber");
4597 assert(0);
4598 } else {
4599 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004600 LOGD("Internal Request drop the result");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004601 } else {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004602 if (result->result != NULL) {
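                // Rewrite ANDROID_SYNC_FRAME_NUMBER (when present) from the internal
                // frame number to the framework-visible frame number before delivering
                // the result.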
Binhao Lin299ffc92017-04-27 11:22:47 -07004603 camera_metadata_t *metadata = const_cast<camera_metadata_t*>(result->result);
4604 camera_metadata_entry_t entry;
4605 int ret = find_camera_metadata_entry(metadata, ANDROID_SYNC_FRAME_NUMBER, &entry);
4606 if (ret == OK) {
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004607 int64_t sync_frame_number = frameworkFrameNumber;
Binhao Lin299ffc92017-04-27 11:22:47 -07004608 ret = update_camera_metadata_entry(metadata, entry.index, &sync_frame_number, 1, &entry);
4609 if (ret != OK)
4610 LOGE("Update ANDROID_SYNC_FRAME_NUMBER Error!");
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004611 }
Binhao Lin9cdfa3f2017-04-19 11:47:45 -07004612 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004613 result->frame_number = frameworkFrameNumber;
4614 mCallbackOps->process_capture_result(mCallbackOps, result);
4615 }
4616 }
4617}
4618
4619/*===========================================================================
4620 * FUNCTION : orchestrateNotify
4621 *
4622 * DESCRIPTION: Orchestrates a notify to camera service
4623 *
4624 * PARAMETERS :
 4625 *   @notify_msg : notify message to send to the framework
4626 *
4627 * RETURN :
4628 *
4629 *==========================================================================*/
4630void QCamera3HardwareInterface::orchestrateNotify(camera3_notify_msg_t *notify_msg)
4631{
4632 uint32_t frameworkFrameNumber;
4633 uint32_t internalFrameNumber = notify_msg->message.shutter.frame_number;
Thierry Strudel2896d122017-02-23 19:18:03 -08004634 int32_t rc = NO_ERROR;
4635
4636 rc = _orchestrationDb.getFrameworkFrameNumber(internalFrameNumber,
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004637 frameworkFrameNumber);
Thierry Strudel2896d122017-02-23 19:18:03 -08004638
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004639 if (rc != NO_ERROR) {
Thierry Strudel2896d122017-02-23 19:18:03 -08004640 if (notify_msg->message.error.error_code == CAMERA3_MSG_ERROR_DEVICE) {
4641 LOGD("Sending CAMERA3_MSG_ERROR_DEVICE to framework");
4642 frameworkFrameNumber = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004643 } else {
Thierry Strudel2896d122017-02-23 19:18:03 -08004644 LOGE("Cannot find translated frameworkFrameNumber");
4645 assert(0);
4646 return;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004647 }
4648 }
Thierry Strudel2896d122017-02-23 19:18:03 -08004649
4650 if (frameworkFrameNumber == EMPTY_FRAMEWORK_FRAME_NUMBER) {
4651 LOGD("Internal Request drop the notifyCb");
4652 } else {
4653 notify_msg->message.shutter.frame_number = frameworkFrameNumber;
4654 mCallbackOps->notify(mCallbackOps, notify_msg);
4655 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004656}
4657
4658/*===========================================================================
4659 * FUNCTION : FrameNumberRegistry
4660 *
4661 * DESCRIPTION: Constructor
4662 *
4663 * PARAMETERS :
4664 *
4665 * RETURN :
4666 *
4667 *==========================================================================*/
4668FrameNumberRegistry::FrameNumberRegistry()
4669{
4670 _nextFreeInternalNumber = INTERNAL_FRAME_STARTING_NUMBER;
4671}
4672
4673/*===========================================================================
4674 * FUNCTION : ~FrameNumberRegistry
4675 *
4676 * DESCRIPTION: Destructor
4677 *
4678 * PARAMETERS :
4679 *
4680 * RETURN :
4681 *
4682 *==========================================================================*/
4683FrameNumberRegistry::~FrameNumberRegistry()
4684{
4685}
4686
4687/*===========================================================================
4688 * FUNCTION : PurgeOldEntriesLocked
4689 *
 4690 * DESCRIPTION: Maintenance function to trigger the LRU cleanup mechanism
4691 *
4692 * PARAMETERS :
4693 *
4694 * RETURN : NONE
4695 *
4696 *==========================================================================*/
4697void FrameNumberRegistry::purgeOldEntriesLocked()
4698{
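    // Starting from the oldest entry, drop anything that has fallen more than
    // FRAME_REGISTER_LRU_SIZE internal frame numbers behind the next free number;
    // the first newer entry stops the scan.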
4699 while (_register.begin() != _register.end()) {
4700 auto itr = _register.begin();
4701 if (itr->first < (_nextFreeInternalNumber - FRAME_REGISTER_LRU_SIZE)) {
4702 _register.erase(itr);
4703 } else {
4704 return;
4705 }
4706 }
4707}
4708
4709/*===========================================================================
4710 * FUNCTION : allocStoreInternalFrameNumber
4711 *
4712 * DESCRIPTION: Method to note down a framework request and associate a new
4713 * internal request number against it
4714 *
4715 * PARAMETERS :
 4716 *   @frameworkFrameNumber: Identifier given by the framework
 4717 *   @internalFrameNumber : Output parameter which will hold the newly generated
 4718 *                          internal frame number entry
4719 *
4720 * RETURN : Error code
4721 *
4722 *==========================================================================*/
4723int32_t FrameNumberRegistry::allocStoreInternalFrameNumber(uint32_t frameworkFrameNumber,
4724 uint32_t &internalFrameNumber)
4725{
4726 Mutex::Autolock lock(mRegistryLock);
4727 internalFrameNumber = _nextFreeInternalNumber++;
4728 LOGD("Storing ff#:%d, with internal:%d", frameworkFrameNumber, internalFrameNumber);
4729 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, frameworkFrameNumber));
4730 purgeOldEntriesLocked();
4731 return NO_ERROR;
4732}
4733
4734/*===========================================================================
4735 * FUNCTION : generateStoreInternalFrameNumber
4736 *
4737 * DESCRIPTION: Method to associate a new internal request number independent
 4738 *              of any association with framework requests
4739 *
4740 * PARAMETERS :
 4741 *   @internalFrameNumber: Output parameter which will hold the newly generated
 4742 *                         internal frame number
4743 *
4744 * RETURN : Error code
4745 *
4746 *==========================================================================*/
4747int32_t FrameNumberRegistry::generateStoreInternalFrameNumber(uint32_t &internalFrameNumber)
4748{
4749 Mutex::Autolock lock(mRegistryLock);
4750 internalFrameNumber = _nextFreeInternalNumber++;
4751 LOGD("Generated internal framenumber:%d", internalFrameNumber);
4752 _register.insert(std::pair<uint32_t,uint32_t>(internalFrameNumber, EMPTY_FRAMEWORK_FRAME_NUMBER));
4753 purgeOldEntriesLocked();
4754 return NO_ERROR;
4755}
4756
4757/*===========================================================================
4758 * FUNCTION : getFrameworkFrameNumber
4759 *
4760 * DESCRIPTION: Method to query the framework framenumber given an internal #
4761 *
4762 * PARAMETERS :
 4763 *   @internalFrameNumber : Internal frame number reference
 4764 *   @frameworkFrameNumber: Output parameter holding the framework frame number
4765 *
4766 * RETURN : Error code
4767 *
4768 *==========================================================================*/
4769int32_t FrameNumberRegistry::getFrameworkFrameNumber(uint32_t internalFrameNumber,
4770 uint32_t &frameworkFrameNumber)
4771{
4772 Mutex::Autolock lock(mRegistryLock);
4773 auto itr = _register.find(internalFrameNumber);
4774 if (itr == _register.end()) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08004775 LOGE("Cannot find internal#: %d", internalFrameNumber);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004776 return -ENOENT;
4777 }
4778
4779 frameworkFrameNumber = itr->second;
4780 purgeOldEntriesLocked();
4781 return NO_ERROR;
4782}
Thierry Strudel3d639192016-09-09 11:52:26 -07004783
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004784status_t QCamera3HardwareInterface::fillPbStreamConfig(
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004785 pbcamera::StreamConfiguration *config, uint32_t pbStreamId, QCamera3Channel *channel,
4786 uint32_t streamIndex) {
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004787 if (config == nullptr) {
4788 LOGE("%s: config is null", __FUNCTION__);
4789 return BAD_VALUE;
4790 }
4791
4792 if (channel == nullptr) {
4793 LOGE("%s: channel is null", __FUNCTION__);
4794 return BAD_VALUE;
4795 }
4796
4797 QCamera3Stream *stream = channel->getStreamByIndex(streamIndex);
4798 if (stream == nullptr) {
4799 LOGE("%s: Failed to get stream %d in channel.", __FUNCTION__, streamIndex);
4800 return NAME_NOT_FOUND;
4801 }
4802
4803 const cam_stream_info_t* streamInfo = stream->getStreamInfo();
4804 if (streamInfo == nullptr) {
4805 LOGE("%s: Failed to get stream info for stream %d in channel.", __FUNCTION__, streamIndex);
4806 return NAME_NOT_FOUND;
4807 }
4808
4809 config->id = pbStreamId;
4810 config->image.width = streamInfo->dim.width;
4811 config->image.height = streamInfo->dim.height;
4812 config->image.padding = 0;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004813
4814 int bytesPerPixel = 0;
4815
4816 switch (streamInfo->fmt) {
4817 case CAM_FORMAT_YUV_420_NV21:
4818 config->image.format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
4819 bytesPerPixel = 1;
4820 break;
4821 case CAM_FORMAT_YUV_420_NV12:
4822 case CAM_FORMAT_YUV_420_NV12_VENUS:
4823 config->image.format = HAL_PIXEL_FORMAT_YCbCr_420_SP;
4824 bytesPerPixel = 1;
4825 break;
4826 default:
4827 ALOGE("%s: Stream format %d not supported.", __FUNCTION__, streamInfo->fmt);
4828 return BAD_VALUE;
4829 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004830
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004831 uint32_t totalPlaneSize = 0;
4832
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004833 // Fill plane information.
4834 for (uint32_t i = 0; i < streamInfo->buf_planes.plane_info.num_planes; i++) {
4835 pbcamera::PlaneConfiguration plane;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -07004836 plane.stride = streamInfo->buf_planes.plane_info.mp[i].stride * bytesPerPixel;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004837 plane.scanline = streamInfo->buf_planes.plane_info.mp[i].scanline;
4838 config->image.planes.push_back(plane);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004839
4840 totalPlaneSize += (plane.stride * plane.scanline);
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004841 }
4842
Chien-Yu Chene687bd02016-12-07 18:30:26 -08004843 config->image.padding = streamInfo->buf_planes.plane_info.frame_len - totalPlaneSize;
Chien-Yu Chen8e599492016-11-01 13:37:46 -07004844 return OK;
4845}
4846
Thierry Strudel3d639192016-09-09 11:52:26 -07004847/*===========================================================================
4848 * FUNCTION : processCaptureRequest
4849 *
4850 * DESCRIPTION: process a capture request from camera service
4851 *
4852 * PARAMETERS :
4853 * @request : request from framework to process
4854 *
4855 * RETURN :
4856 *
4857 *==========================================================================*/
4858int QCamera3HardwareInterface::processCaptureRequest(
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004859 camera3_capture_request_t *request,
4860 List<InternalRequest> &internallyRequestedStreams)
Thierry Strudel3d639192016-09-09 11:52:26 -07004861{
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004862 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_PROC_CAP_REQ);
Thierry Strudel3d639192016-09-09 11:52:26 -07004863 int rc = NO_ERROR;
4864 int32_t request_id;
4865 CameraMetadata meta;
Thierry Strudel3d639192016-09-09 11:52:26 -07004866 bool isVidBufRequested = false;
4867 camera3_stream_buffer_t *pInputBuffer = NULL;
Thierry Strudel54dc9782017-02-15 12:12:10 -08004868 char prop[PROPERTY_VALUE_MAX];
Thierry Strudel3d639192016-09-09 11:52:26 -07004869
4870 pthread_mutex_lock(&mMutex);
4871
4872 // Validate current state
4873 switch (mState) {
4874 case CONFIGURED:
4875 case STARTED:
4876 /* valid state */
4877 break;
4878
4879 case ERROR:
4880 pthread_mutex_unlock(&mMutex);
4881 handleCameraDeviceError();
4882 return -ENODEV;
4883
4884 default:
4885 LOGE("Invalid state %d", mState);
4886 pthread_mutex_unlock(&mMutex);
4887 return -ENODEV;
4888 }
4889
Thierry Strudele80ad7c2016-12-06 10:16:27 -08004890 rc = validateCaptureRequest(request, internallyRequestedStreams);
Thierry Strudel3d639192016-09-09 11:52:26 -07004891 if (rc != NO_ERROR) {
4892 LOGE("incoming request is not valid");
4893 pthread_mutex_unlock(&mMutex);
4894 return rc;
4895 }
4896
4897 meta = request->settings;
4898
4899 // For first capture request, send capture intent, and
4900 // stream on all streams
4901 if (mState == CONFIGURED) {
Chien-Yu Chene96475e2017-04-11 11:53:26 -07004902 logEaselEvent("EASEL_STARTUP_LATENCY", "First request");
Thierry Strudel3d639192016-09-09 11:52:26 -07004903 // send an unconfigure to the backend so that the isp
4904 // resources are deallocated
4905 if (!mFirstConfiguration) {
4906 cam_stream_size_info_t stream_config_info;
4907 int32_t hal_version = CAM_HAL_V3;
4908 memset(&stream_config_info, 0, sizeof(cam_stream_size_info_t));
4909 stream_config_info.buffer_info.min_buffers =
4910 MIN_INFLIGHT_REQUESTS;
4911 stream_config_info.buffer_info.max_buffers =
Thierry Strudel2896d122017-02-23 19:18:03 -08004912 m_bIs4KVideo ? 0 :
Jason Leea46ad5e2017-07-07 15:20:56 -07004913 m_bEis3PropertyEnabled && m_bIsVideo ? MAX_VIDEO_BUFFERS : MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07004914 clear_metadata_buffer(mParameters);
4915 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4916 CAM_INTF_PARM_HAL_VERSION, hal_version);
4917 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
4918 CAM_INTF_META_STREAM_INFO, stream_config_info);
4919 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
4920 mParameters);
4921 if (rc < 0) {
4922 LOGE("set_parms for unconfigure failed");
4923 pthread_mutex_unlock(&mMutex);
4924 return rc;
4925 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07004926
Thierry Strudel3d639192016-09-09 11:52:26 -07004927 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08004928 mPerfLockMgr.acquirePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07004929 /* get eis information for stream configuration */
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004930 cam_is_type_t isTypeVideo, isTypePreview, is_type=IS_TYPE_NONE;
Thierry Strudel3d639192016-09-09 11:52:26 -07004931 char is_type_value[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004932 property_get("persist.camera.is_type", is_type_value, "4");
4933 isTypeVideo = static_cast<cam_is_type_t>(atoi(is_type_value));
4934 // Make default value for preview IS_TYPE as IS_TYPE_EIS_2_0
4935 property_get("persist.camera.is_type_preview", is_type_value, "4");
4936 isTypePreview = static_cast<cam_is_type_t>(atoi(is_type_value));
4937 LOGD("isTypeVideo: %d isTypePreview: %d", isTypeVideo, isTypePreview);
Thierry Strudel3d639192016-09-09 11:52:26 -07004938
4939 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
4940 int32_t hal_version = CAM_HAL_V3;
4941 uint8_t captureIntent =
4942 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
4943 mCaptureIntent = captureIntent;
4944 clear_metadata_buffer(mParameters);
4945 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version);
4946 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_CAPTURE_INTENT, captureIntent);
4947 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07004948 if (mFirstConfiguration) {
4949 // configure instant AEC
4950 // Instant AEC is a session based parameter and it is needed only
4951 // once per complete session after open camera.
4952 // i.e. This is set only once for the first capture request, after open camera.
4953 setInstantAEC(meta);
4954 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004955 uint8_t fwkVideoStabMode=0;
4956 if (meta.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
4957 fwkVideoStabMode = meta.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
4958 }
4959
Xue Tuecac74e2017-04-17 13:58:15 -07004960        // If the EIS setprop is enabled, turn it on only for video/preview
4961 bool setEis = m_bEisEnable && m_bEisSupportedSize &&
Jason Lee603176d2017-05-31 11:43:27 -07004962 (isTypeVideo >= IS_TYPE_EIS_2_0) && !meta.exists(QCAMERA3_USE_AV_TIMER);
Thierry Strudel3d639192016-09-09 11:52:26 -07004963 int32_t vsMode;
4964 vsMode = (setEis)? DIS_ENABLE: DIS_DISABLE;
4965 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_DIS_ENABLE, vsMode)) {
4966 rc = BAD_VALUE;
4967 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004968 LOGD("setEis %d", setEis);
4969 bool eis3Supported = false;
4970 size_t count = IS_TYPE_MAX;
4971 count = MIN(gCamCapability[mCameraId]->supported_is_types_cnt, count);
4972 for (size_t i = 0; i < count; i++) {
4973 if (gCamCapability[mCameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0) {
4974 eis3Supported = true;
4975 break;
4976 }
4977 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004978
4979 //IS type will be 0 unless EIS is supported. If EIS is supported
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004980 //it could either be 4 or 5 depending on the stream and video size
Thierry Strudel3d639192016-09-09 11:52:26 -07004981 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
4982 if (setEis) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004983 if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_PREVIEW) {
4984 is_type = isTypePreview;
4985 } else if (mStreamConfigInfo.type[i] == CAM_STREAM_TYPE_VIDEO ) {
4986 if ( (isTypeVideo == IS_TYPE_EIS_3_0) && (eis3Supported == FALSE) ) {
4987 LOGW(" EIS_3.0 is not supported and so setting EIS_2.0");
Thierry Strudel3d639192016-09-09 11:52:26 -07004988 is_type = IS_TYPE_EIS_2_0;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004989 } else {
4990 is_type = isTypeVideo;
Thierry Strudel3d639192016-09-09 11:52:26 -07004991 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004992 } else {
4993 is_type = IS_TYPE_NONE;
4994 }
Thierry Strudel3d639192016-09-09 11:52:26 -07004995 mStreamConfigInfo.is_type[i] = is_type;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07004996 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07004997 mStreamConfigInfo.is_type[i] = IS_TYPE_NONE;
4998 }
4999 }
5000
5001 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5002 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
5003
Thierry Strudel54dc9782017-02-15 12:12:10 -08005004 //Disable tintless only if the property is set to 0
5005 memset(prop, 0, sizeof(prop));
5006 property_get("persist.camera.tintless.enable", prop, "1");
5007 int32_t tintless_value = atoi(prop);
5008
Thierry Strudel3d639192016-09-09 11:52:26 -07005009 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5010 CAM_INTF_PARM_TINTLESS, tintless_value);
Thierry Strudel54dc9782017-02-15 12:12:10 -08005011
Thierry Strudel3d639192016-09-09 11:52:26 -07005012 //Disable CDS for HFR mode or if DIS/EIS is on.
5013 //CDS is a session parameter in the backend/ISP, so need to be set/reset
5014 //after every configure_stream
5015 if ((CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) ||
5016 (m_bIsVideo)) {
5017 int32_t cds = CAM_CDS_MODE_OFF;
5018 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5019 CAM_INTF_PARM_CDS_MODE, cds))
5020 LOGE("Failed to disable CDS for HFR mode");
5021
5022 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005023
5024 if (m_debug_avtimer || meta.exists(QCAMERA3_USE_AV_TIMER)) {
5025 uint8_t* use_av_timer = NULL;
5026
5027 if (m_debug_avtimer){
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005028 LOGI(" Enabling AV timer through setprop");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005029 use_av_timer = &m_debug_avtimer;
Binhao Lin09245482017-08-31 18:25:29 -07005030 m_bAVTimerEnabled = true;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005031 }
5032 else{
5033 use_av_timer =
5034 meta.find(QCAMERA3_USE_AV_TIMER).data.u8;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005035 if (use_av_timer) {
Binhao Lin09245482017-08-31 18:25:29 -07005036 m_bAVTimerEnabled = true;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005037 LOGI("Enabling AV timer through Metadata: use_av_timer: %d", *use_av_timer);
5038 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005039 }
5040
5041 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_USE_AV_TIMER, *use_av_timer)) {
5042 rc = BAD_VALUE;
5043 }
5044 }
5045
Thierry Strudel3d639192016-09-09 11:52:26 -07005046 setMobicat();
5047
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005048 uint8_t nrMode = 0;
5049 if (meta.exists(ANDROID_NOISE_REDUCTION_MODE)) {
5050 nrMode = meta.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
5051 }
5052
Thierry Strudel3d639192016-09-09 11:52:26 -07005053 /* Set fps and hfr mode while sending meta stream info so that sensor
5054 * can configure appropriate streaming mode */
5055 mHFRVideoFps = DEFAULT_VIDEO_FPS;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005056 mMinInFlightRequests = MIN_INFLIGHT_REQUESTS;
5057 mMaxInFlightRequests = MAX_INFLIGHT_REQUESTS;
Thierry Strudel3d639192016-09-09 11:52:26 -07005058 if (meta.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
5059 rc = setHalFpsRange(meta, mParameters);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005060 if (rc == NO_ERROR) {
5061 int32_t max_fps =
5062 (int32_t) meta.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
Zhijun He21b864a2016-06-24 13:41:19 -07005063 if (max_fps == 60 || mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005064 mMinInFlightRequests = MIN_INFLIGHT_60FPS_REQUESTS;
5065 }
5066 /* For HFR, more buffers are dequeued upfront to improve the performance */
5067 if (mBatchSize) {
5068 mMinInFlightRequests = MIN_INFLIGHT_HFR_REQUESTS;
5069 mMaxInFlightRequests = MAX_INFLIGHT_HFR_REQUESTS;
5070 }
5071 }
5072 else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005073 LOGE("setHalFpsRange failed");
5074 }
5075 }
5076 if (meta.exists(ANDROID_CONTROL_MODE)) {
5077 uint8_t metaMode = meta.find(ANDROID_CONTROL_MODE).data.u8[0];
5078 rc = extractSceneMode(meta, metaMode, mParameters);
5079 if (rc != NO_ERROR) {
5080 LOGE("extractSceneMode failed");
5081 }
5082 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005083 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07005084
Thierry Strudel04e026f2016-10-10 11:27:36 -07005085 if (meta.exists(QCAMERA3_VIDEO_HDR_MODE)) {
5086 cam_video_hdr_mode_t vhdr = (cam_video_hdr_mode_t)
5087 meta.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
5088 rc = setVideoHdrMode(mParameters, vhdr);
5089 if (rc != NO_ERROR) {
5090 LOGE("setVideoHDR is failed");
5091 }
5092 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005093
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005094 if (meta.exists(TANGO_MODE_DATA_SENSOR_FULLFOV)) {
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005095 uint8_t sensorModeFullFov =
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -07005096 meta.find(TANGO_MODE_DATA_SENSOR_FULLFOV).data.u8[0];
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -07005097 LOGD("SENSOR_MODE_FULLFOV %d" , sensorModeFullFov);
5098 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_SENSOR_MODE_FULLFOV,
5099 sensorModeFullFov)) {
5100 rc = BAD_VALUE;
5101 }
5102 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005103 //TODO: validate the arguments, HSV scenemode should have only the
5104 //advertised fps ranges
5105
 5106        /* Set the capture intent, HAL version, tintless, stream info,
 5107         * and DIS enable parameters to the backend */
5108 LOGD("set_parms META_STREAM_INFO " );
5109 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
Thierry Strudel2896d122017-02-23 19:18:03 -08005110 LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%" PRIx64
5111 ", Format:%d is_type: %d",
Thierry Strudel3d639192016-09-09 11:52:26 -07005112 mStreamConfigInfo.type[i],
5113 mStreamConfigInfo.stream_sizes[i].width,
5114 mStreamConfigInfo.stream_sizes[i].height,
5115 mStreamConfigInfo.postprocess_mask[i],
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005116 mStreamConfigInfo.format[i],
5117 mStreamConfigInfo.is_type[i]);
Thierry Strudel3d639192016-09-09 11:52:26 -07005118 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005119
Thierry Strudel3d639192016-09-09 11:52:26 -07005120 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5121 mParameters);
5122 if (rc < 0) {
5123 LOGE("set_parms failed for hal version, stream info");
5124 }
5125
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005126 cam_sensor_mode_info_t sensorModeInfo = {};
5127 rc = getSensorModeInfo(sensorModeInfo);
Thierry Strudel3d639192016-09-09 11:52:26 -07005128 if (rc != NO_ERROR) {
5129 LOGE("Failed to get sensor output size");
5130 pthread_mutex_unlock(&mMutex);
5131 goto error_exit;
5132 }
5133
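        // Update the crop region mapper with the full active pixel array size and the
        // selected sensor mode's active array size, so crop regions can be translated
        // between the framework's coordinate space and the sensor mode's.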
5134 mCropRegionMapper.update(gCamCapability[mCameraId]->active_array_size.width,
5135 gCamCapability[mCameraId]->active_array_size.height,
Chien-Yu Chen605c3872017-06-14 11:09:23 -07005136 sensorModeInfo.active_array_size.width,
5137 sensorModeInfo.active_array_size.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07005138
5139 /* Set batchmode before initializing channel. Since registerBuffer
5140 * internally initializes some of the channels, better set batchmode
5141 * even before first register buffer */
5142 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5143 it != mStreamInfo.end(); it++) {
5144 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
5145 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5146 && mBatchSize) {
5147 rc = channel->setBatchSize(mBatchSize);
5148 //Disable per frame map unmap for HFR/batchmode case
5149 rc |= channel->setPerFrameMapUnmap(false);
5150 if (NO_ERROR != rc) {
5151 LOGE("Channel init failed %d", rc);
5152 pthread_mutex_unlock(&mMutex);
5153 goto error_exit;
5154 }
5155 }
5156 }
5157
5158 //First initialize all streams
5159 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
5160 it != mStreamInfo.end(); it++) {
5161 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
Emilian Peev49c4c6b2017-04-24 10:21:34 +01005162
5163 /* Initial value of NR mode is needed before stream on */
5164 channel->setNRMode(nrMode);
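        // When EIS is enabled, video/preview channels are initialized with the IS type
        // chosen for that stream in mStreamConfigInfo; every other channel uses
        // IS_TYPE_NONE.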
Thierry Strudel3d639192016-09-09 11:52:26 -07005165 if ((((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) ||
5166 ((1U << CAM_STREAM_TYPE_PREVIEW) == channel->getStreamTypeMask())) &&
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005167 setEis) {
5168 for (size_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
5169 if ( (1U << mStreamConfigInfo.type[i]) == channel->getStreamTypeMask() ) {
5170 is_type = mStreamConfigInfo.is_type[i];
5171 break;
5172 }
5173 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005174 rc = channel->initialize(is_type);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005175 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005176 rc = channel->initialize(IS_TYPE_NONE);
5177 }
5178 if (NO_ERROR != rc) {
5179 LOGE("Channel initialization failed %d", rc);
5180 pthread_mutex_unlock(&mMutex);
5181 goto error_exit;
5182 }
5183 }
5184
5185 if (mRawDumpChannel) {
5186 rc = mRawDumpChannel->initialize(IS_TYPE_NONE);
5187 if (rc != NO_ERROR) {
5188 LOGE("Error: Raw Dump Channel init failed");
5189 pthread_mutex_unlock(&mMutex);
5190 goto error_exit;
5191 }
5192 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005193 if (mHdrPlusRawSrcChannel) {
5194 rc = mHdrPlusRawSrcChannel->initialize(IS_TYPE_NONE);
5195 if (rc != NO_ERROR) {
5196 LOGE("Error: HDR+ RAW Source Channel init failed");
5197 pthread_mutex_unlock(&mMutex);
5198 goto error_exit;
5199 }
5200 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005201 if (mSupportChannel) {
5202 rc = mSupportChannel->initialize(IS_TYPE_NONE);
5203 if (rc < 0) {
5204 LOGE("Support channel initialization failed");
5205 pthread_mutex_unlock(&mMutex);
5206 goto error_exit;
5207 }
5208 }
5209 if (mAnalysisChannel) {
5210 rc = mAnalysisChannel->initialize(IS_TYPE_NONE);
5211 if (rc < 0) {
5212 LOGE("Analysis channel initialization failed");
5213 pthread_mutex_unlock(&mMutex);
5214 goto error_exit;
5215 }
5216 }
5217 if (mDummyBatchChannel) {
5218 rc = mDummyBatchChannel->setBatchSize(mBatchSize);
5219 if (rc < 0) {
5220 LOGE("mDummyBatchChannel setBatchSize failed");
5221 pthread_mutex_unlock(&mMutex);
5222 goto error_exit;
5223 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005224 rc = mDummyBatchChannel->initialize(IS_TYPE_NONE);
Thierry Strudel3d639192016-09-09 11:52:26 -07005225 if (rc < 0) {
5226 LOGE("mDummyBatchChannel initialization failed");
5227 pthread_mutex_unlock(&mMutex);
5228 goto error_exit;
5229 }
5230 }
5231
5232 // Set bundle info
5233 rc = setBundleInfo();
5234 if (rc < 0) {
5235 LOGE("setBundleInfo failed %d", rc);
5236 pthread_mutex_unlock(&mMutex);
5237 goto error_exit;
5238 }
5239
5240 //update settings from app here
5241 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
5242 mIsDeviceLinked = meta.find(QCAMERA3_DUALCAM_LINK_ENABLE).data.u8[0];
5243 LOGH("Dualcam: setting On=%d id =%d", mIsDeviceLinked, mCameraId);
5244 }
5245 if (meta.exists(QCAMERA3_DUALCAM_LINK_IS_MAIN)) {
5246 mIsMainCamera = meta.find(QCAMERA3_DUALCAM_LINK_IS_MAIN).data.u8[0];
5247 LOGH("Dualcam: Is this main camera = %d id =%d", mIsMainCamera, mCameraId);
5248 }
5249 if (meta.exists(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID)) {
5250 mLinkedCameraId = meta.find(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID).data.u8[0];
5251 LOGH("Dualcam: Linked camera Id %d id =%d", mLinkedCameraId, mCameraId);
5252
5253 if ( (mLinkedCameraId >= MM_CAMERA_MAX_NUM_SENSORS) &&
5254 (mLinkedCameraId != mCameraId) ) {
5255 LOGE("Dualcam: mLinkedCameraId %d is invalid, current cam id = %d",
5256 mLinkedCameraId, mCameraId);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005257 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005258 goto error_exit;
5259 }
5260 }
5261
5262 // add bundle related cameras
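    // The bundle command tells the backend this camera's role (primary/Bayer vs.
    // auxiliary/mono), whether related-sensor sync is on, and the session id of the
    // linked camera so the two sessions can be kept in sync.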
5263 LOGH("%s: Dualcam: id =%d, mIsDeviceLinked=%d", __func__,mCameraId, mIsDeviceLinked);
5264 if (meta.exists(QCAMERA3_DUALCAM_LINK_ENABLE)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005265 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
5266 &m_pDualCamCmdPtr->bundle_info;
5267 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005268 if (mIsDeviceLinked)
5269 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_ON;
5270 else
5271 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
5272
5273 pthread_mutex_lock(&gCamLock);
5274
5275 if (sessionId[mLinkedCameraId] == 0xDEADBEEF) {
5276 LOGE("Dualcam: Invalid Session Id ");
5277 pthread_mutex_unlock(&gCamLock);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005278 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005279 goto error_exit;
5280 }
5281
5282 if (mIsMainCamera == 1) {
5283 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
5284 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005285 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005286 m_pRelCamSyncBuf->cam_role = CAM_ROLE_BAYER;
Thierry Strudel3d639192016-09-09 11:52:26 -07005287 // related session id should be session id of linked session
5288 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5289 } else {
5290 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
5291 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07005292 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005293 m_pRelCamSyncBuf->cam_role = CAM_ROLE_MONO;
Thierry Strudel3d639192016-09-09 11:52:26 -07005294 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
5295 }
Thierry Strudel2896d122017-02-23 19:18:03 -08005296 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07005297 pthread_mutex_unlock(&gCamLock);
5298
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005299 rc = mCameraHandle->ops->set_dual_cam_cmd(
5300 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07005301 if (rc < 0) {
5302 LOGE("Dualcam: link failed");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005303 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005304 goto error_exit;
5305 }
5306 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005307 goto no_error;
5308error_exit:
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005309 mPerfLockMgr.releasePerfLock(PERF_LOCK_START_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07005310 return rc;
5311no_error:
Thierry Strudel3d639192016-09-09 11:52:26 -07005312 mWokenUpByDaemon = false;
5313 mPendingLiveRequest = 0;
5314 mFirstConfiguration = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005315 }
5316
5317 uint32_t frameNumber = request->frame_number;
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005318 cam_stream_ID_t streamsArray;
Thierry Strudel3d639192016-09-09 11:52:26 -07005319
5320 if (mFlushPerf) {
5321 //we cannot accept any requests during flush
5322 LOGE("process_capture_request cannot proceed during flush");
5323 pthread_mutex_unlock(&mMutex);
5324 return NO_ERROR; //should return an error
5325 }
5326
5327 if (meta.exists(ANDROID_REQUEST_ID)) {
5328 request_id = meta.find(ANDROID_REQUEST_ID).data.i32[0];
5329 mCurrentRequestId = request_id;
5330 LOGD("Received request with id: %d", request_id);
5331 } else if (mState == CONFIGURED || mCurrentRequestId == -1){
 5332        LOGE("Unable to find request id field, "
 5333                "& no previous id available");
5334 pthread_mutex_unlock(&mMutex);
5335 return NAME_NOT_FOUND;
5336 } else {
5337 LOGD("Re-using old request id");
5338 request_id = mCurrentRequestId;
5339 }
5340
5341 LOGH("num_output_buffers = %d input_buffer = %p frame_number = %d",
5342 request->num_output_buffers,
5343 request->input_buffer,
5344 frameNumber);
5345 // Acquire all request buffers first
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005346 streamsArray.num_streams = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07005347 int blob_request = 0;
Emilian Peev7650c122017-01-19 08:24:33 -08005348 bool depthRequestPresent = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07005349 uint32_t snapshotStreamId = 0;
5350 for (size_t i = 0; i < request->num_output_buffers; i++) {
5351 const camera3_stream_buffer_t& output = request->output_buffers[i];
5352 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5353
Emilian Peev7650c122017-01-19 08:24:33 -08005354 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5355 (output.stream->data_space != HAL_DATASPACE_DEPTH)) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005356 //FIXME??:Call function to store local copy of jpeg data for encode params.
Thierry Strudel3d639192016-09-09 11:52:26 -07005357 blob_request = 1;
5358 snapshotStreamId = channel->getStreamID(channel->getStreamTypeMask());
5359 }
5360
5361 if (output.acquire_fence != -1) {
5362 rc = sync_wait(output.acquire_fence, TIMEOUT_NEVER);
5363 close(output.acquire_fence);
5364 if (rc != OK) {
5365 LOGE("sync wait failed %d", rc);
5366 pthread_mutex_unlock(&mMutex);
5367 return rc;
5368 }
5369 }
5370
Emilian Peev0f3c3162017-03-15 12:57:46 +00005371 if ((output.stream->format == HAL_PIXEL_FORMAT_BLOB) &&
5372 (output.stream->data_space == HAL_DATASPACE_DEPTH)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005373 depthRequestPresent = true;
5374 continue;
5375 }
5376
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005377 streamsArray.stream_request[streamsArray.num_streams++].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005378 channel->getStreamID(channel->getStreamTypeMask());
Thierry Strudel3d639192016-09-09 11:52:26 -07005379
5380 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5381 isVidBufRequested = true;
5382 }
5383 }
5384
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005385    //FIXME: Add checks to ensure there are no dups in validateCaptureRequest
5386 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5387 itr++) {
5388 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5389 streamsArray.stream_request[streamsArray.num_streams++].streamID =
5390 channel->getStreamID(channel->getStreamTypeMask());
5391
5392 if ((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask()) {
5393 isVidBufRequested = true;
5394 }
5395 }
5396
Thierry Strudel3d639192016-09-09 11:52:26 -07005397 if (blob_request) {
Shuzhen Wang850a7c22017-05-02 14:48:23 -07005398 ATRACE_ASYNC_BEGIN("SNAPSHOT", frameNumber);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005399 mPerfLockMgr.acquirePerfLock(PERF_LOCK_TAKE_SNAPSHOT);
Thierry Strudel3d639192016-09-09 11:52:26 -07005400 }
5401 if (blob_request && mRawDumpChannel) {
5402 LOGD("Trigger Raw based on blob request if Raw dump is enabled");
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005403 streamsArray.stream_request[streamsArray.num_streams].streamID =
Thierry Strudel3d639192016-09-09 11:52:26 -07005404 mRawDumpChannel->getStreamID(mRawDumpChannel->getStreamTypeMask());
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005405 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
Thierry Strudel3d639192016-09-09 11:52:26 -07005406 }
5407
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005408 {
5409 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5410 // Request a RAW buffer if
5411 // 1. mHdrPlusRawSrcChannel is valid.
 5412        // 2. frameNumber is a multiple of kHdrPlusRawPeriod (in order to limit the RAW capture rate).
5413 // 3. There is no pending HDR+ request.
5414 if (mHdrPlusRawSrcChannel && frameNumber % kHdrPlusRawPeriod == 0 &&
5415 mHdrPlusPendingRequests.size() == 0) {
5416 streamsArray.stream_request[streamsArray.num_streams].streamID =
5417 mHdrPlusRawSrcChannel->getStreamID(mHdrPlusRawSrcChannel->getStreamTypeMask());
5418 streamsArray.stream_request[streamsArray.num_streams++].buf_index = CAM_FREERUN_IDX;
5419 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -07005420 }
5421
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005422 //extract capture intent
5423 if (meta.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
5424 mCaptureIntent =
5425 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
5426 }
5427
5428 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5429 mCacMode =
5430 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5431 }
5432
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005433 uint8_t requestedLensShadingMapMode;
5434 // Get the shading map mode.
5435 if (meta.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
5436 mLastRequestedLensShadingMapMode = requestedLensShadingMapMode =
5437 meta.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
5438 } else {
5439 requestedLensShadingMapMode = mLastRequestedLensShadingMapMode;
5440 }
5441
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005442 bool hdrPlusRequest = false;
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005443 HdrPlusPendingRequest pendingHdrPlusRequest = {};
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005444
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005445 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07005446 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -07005447 // If this request has a still capture intent, try to submit an HDR+ request.
5448 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled &&
5449 mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE) {
5450 hdrPlusRequest = trySubmittingHdrPlusRequestLocked(&pendingHdrPlusRequest, *request, meta);
5451 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005452 }
5453
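    // For an HDR+ request only the frame parameters are set here; the request is
    // queued to mHdrPlusPendingRequests further below instead of being sent to the
    // regular channels (see the !hdrPlusRequest paths).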
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005454 if (hdrPlusRequest) {
5455 // For a HDR+ request, just set the frame parameters.
5456 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
5457 if (rc < 0) {
5458 LOGE("fail to set frame parameters");
5459 pthread_mutex_unlock(&mMutex);
5460 return rc;
5461 }
5462 } else if(request->input_buffer == NULL) {
Thierry Strudel3d639192016-09-09 11:52:26 -07005463 /* Parse the settings:
5464 * - For every request in NORMAL MODE
5465 * - For every request in HFR mode during preview only case
5466 * - For first request of every batch in HFR mode during video
 5467         *   recording. In batch mode the same settings (except the frame number)
 5468         *   are repeated in each request of the batch.
5469 */
5470 if (!mBatchSize ||
5471 (mBatchSize && !isVidBufRequested) ||
5472 (mBatchSize && isVidBufRequested && !mToBeQueuedVidBufs)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005473 rc = setFrameParameters(request, streamsArray, blob_request, snapshotStreamId);
Thierry Strudel3d639192016-09-09 11:52:26 -07005474 if (rc < 0) {
5475 LOGE("fail to set frame parameters");
5476 pthread_mutex_unlock(&mMutex);
5477 return rc;
5478 }
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005479
5480 {
5481 // If HDR+ mode is enabled, override lens shading mode to ON so lens shading map
5482 // will be reported in result metadata.
5483 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
5484 if (mHdrPlusModeEnabled) {
5485 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_LENS_SHADING_MAP_MODE,
5486 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON);
5487 }
5488 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005489 }
 5490        /* For batch mode HFR, setFrameParameters is not called for every
 5491         * request; only the frame number of the latest request is parsed.
 5492         * Keep track of the first and last frame numbers in a batch so that
 5493         * metadata for all frame numbers of the batch can be duplicated in
 5494         * handleBatchMetadata */
5495 if (mBatchSize) {
5496 if (!mToBeQueuedVidBufs) {
5497 //start of the batch
5498 mFirstFrameNumberInBatch = request->frame_number;
5499 }
5500 if(ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5501 CAM_INTF_META_FRAME_NUMBER, request->frame_number)) {
5502 LOGE("Failed to set the frame number in the parameters");
Thierry Strudel9e74aae2016-09-22 17:10:18 -07005503 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07005504 return BAD_VALUE;
5505 }
5506 }
5507 if (mNeedSensorRestart) {
5508 /* Unlock the mutex as restartSensor waits on the channels to be
5509 * stopped, which in turn calls stream callback functions -
5510 * handleBufferWithLock and handleMetadataWithLock */
5511 pthread_mutex_unlock(&mMutex);
5512 rc = dynamicUpdateMetaStreamInfo();
5513 if (rc != NO_ERROR) {
5514 LOGE("Restarting the sensor failed");
5515 return BAD_VALUE;
5516 }
5517 mNeedSensorRestart = false;
5518 pthread_mutex_lock(&mMutex);
5519 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07005520 if(mResetInstantAEC) {
5521 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5522 CAM_INTF_PARM_INSTANT_AEC, (uint8_t)CAM_AEC_NORMAL_CONVERGENCE);
5523 mResetInstantAEC = false;
5524 }
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005525 } else {
Thierry Strudel3d639192016-09-09 11:52:26 -07005526 if (request->input_buffer->acquire_fence != -1) {
5527 rc = sync_wait(request->input_buffer->acquire_fence, TIMEOUT_NEVER);
5528 close(request->input_buffer->acquire_fence);
5529 if (rc != OK) {
5530 LOGE("input buffer sync wait failed %d", rc);
5531 pthread_mutex_unlock(&mMutex);
5532 return rc;
5533 }
5534 }
5535 }
5536
5537 if (mCaptureIntent == ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM) {
5538 mLastCustIntentFrmNum = frameNumber;
5539 }
5540 /* Update pending request list and pending buffers map */
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005541 PendingRequestInfo pendingRequest = {};
Thierry Strudel3d639192016-09-09 11:52:26 -07005542 pendingRequestIterator latestRequest;
5543 pendingRequest.frame_number = frameNumber;
Emilian Peev7650c122017-01-19 08:24:33 -08005544 pendingRequest.num_buffers = depthRequestPresent ?
5545 (request->num_output_buffers - 1 ) : request->num_output_buffers;
Thierry Strudel3d639192016-09-09 11:52:26 -07005546 pendingRequest.request_id = request_id;
5547 pendingRequest.blob_request = blob_request;
5548 pendingRequest.timestamp = 0;
Chien-Yu Chenbc730232017-07-12 14:49:55 -07005549 pendingRequest.requestedLensShadingMapMode = requestedLensShadingMapMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07005550 if (request->input_buffer) {
5551 pendingRequest.input_buffer =
5552 (camera3_stream_buffer_t*)malloc(sizeof(camera3_stream_buffer_t));
5553 *(pendingRequest.input_buffer) = *(request->input_buffer);
5554 pInputBuffer = pendingRequest.input_buffer;
5555 } else {
5556 pendingRequest.input_buffer = NULL;
5557 pInputBuffer = NULL;
5558 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005559 pendingRequest.bUseFirstPartial = (mState == CONFIGURED && !request->input_buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -07005560
5561 pendingRequest.pipeline_depth = 0;
5562 pendingRequest.partial_result_cnt = 0;
5563 extractJpegMetadata(mCurJpegMeta, request);
5564 pendingRequest.jpegMetadata = mCurJpegMeta;
5565 pendingRequest.settings = saveRequestSettings(mCurJpegMeta, request);
Thierry Strudel3d639192016-09-09 11:52:26 -07005566 pendingRequest.capture_intent = mCaptureIntent;
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005567 if (meta.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
Shuzhen Wang77b049a2017-08-30 12:24:36 -07005568 pendingRequest.hybrid_ae_enable =
Shuzhen Wang2abea3d2016-03-31 11:09:27 -07005569 meta.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8[0];
5570 }
Chien-Yu Chen98b126c2017-03-14 14:55:32 -07005571
Samuel Ha68ba5172016-12-15 18:41:12 -08005572 /* DevCamDebug metadata processCaptureRequest */
5573 if (meta.exists(DEVCAMDEBUG_META_ENABLE)) {
5574 mDevCamDebugMetaEnable =
5575 meta.find(DEVCAMDEBUG_META_ENABLE).data.u8[0];
5576 }
5577 pendingRequest.DevCamDebug_meta_enable = mDevCamDebugMetaEnable;
5578 /* DevCamDebug metadata end */
Thierry Strudel3d639192016-09-09 11:52:26 -07005579
5580 //extract CAC info
5581 if (meta.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
5582 mCacMode =
5583 meta.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
5584 }
5585 pendingRequest.fwkCacMode = mCacMode;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005586 pendingRequest.hdrplus = hdrPlusRequest;
Emilian Peev30522a12017-08-03 14:36:33 +01005587 pendingRequest.expectedFrameDuration = mExpectedFrameDuration;
5588 mExpectedInflightDuration += mExpectedFrameDuration;
Thierry Strudel3d639192016-09-09 11:52:26 -07005589
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07005590 // extract enableZsl info
5591 if (gExposeEnableZslKey) {
5592 if (meta.exists(ANDROID_CONTROL_ENABLE_ZSL)) {
5593 pendingRequest.enableZsl = meta.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0];
5594 mZslEnabled = pendingRequest.enableZsl;
5595 } else {
5596 pendingRequest.enableZsl = mZslEnabled;
5597 }
5598 }
5599
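    // Track this request's output buffers in mPendingBuffersMap so the HAL can
    // account for them until they are returned, or error them out on flush.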
Thierry Strudel3d639192016-09-09 11:52:26 -07005600 PendingBuffersInRequest bufsForCurRequest;
5601 bufsForCurRequest.frame_number = frameNumber;
5602 // Mark current timestamp for the new request
5603 bufsForCurRequest.timestamp = systemTime(CLOCK_MONOTONIC);
Binhao Lin09245482017-08-31 18:25:29 -07005604 bufsForCurRequest.av_timestamp = 0;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005605 bufsForCurRequest.hdrplus = hdrPlusRequest;
Thierry Strudel3d639192016-09-09 11:52:26 -07005606
Chien-Yu Chen92724a82017-01-06 11:50:30 -08005607 if (hdrPlusRequest) {
5608 // Save settings for this request.
5609 pendingHdrPlusRequest.settings = std::make_shared<metadata_buffer_t>();
5610 memcpy(pendingHdrPlusRequest.settings.get(), mParameters, sizeof(metadata_buffer_t));
5611
5612 // Add to pending HDR+ request queue.
5613 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
5614 mHdrPlusPendingRequests.emplace(frameNumber, pendingHdrPlusRequest);
5615
5616 ALOGD("%s: frame number %u is an HDR+ request.", __FUNCTION__, frameNumber);
5617 }
5618
Thierry Strudel3d639192016-09-09 11:52:26 -07005619 for (size_t i = 0; i < request->num_output_buffers; i++) {
Emilian Peev0f3c3162017-03-15 12:57:46 +00005620 if ((request->output_buffers[i].stream->data_space ==
5621 HAL_DATASPACE_DEPTH) &&
5622 (HAL_PIXEL_FORMAT_BLOB ==
5623 request->output_buffers[i].stream->format)) {
Emilian Peev7650c122017-01-19 08:24:33 -08005624 continue;
5625 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005626 RequestedBufferInfo requestedBuf;
5627 memset(&requestedBuf, 0, sizeof(requestedBuf));
5628 requestedBuf.stream = request->output_buffers[i].stream;
5629 requestedBuf.buffer = NULL;
5630 pendingRequest.buffers.push_back(requestedBuf);
5631
5632 // Add to buffer handle the pending buffers list
5633 PendingBufferInfo bufferInfo;
5634 bufferInfo.buffer = request->output_buffers[i].buffer;
5635 bufferInfo.stream = request->output_buffers[i].stream;
5636 bufsForCurRequest.mPendingBufferList.push_back(bufferInfo);
5637 QCamera3Channel *channel = (QCamera3Channel *)bufferInfo.stream->priv;
5638 LOGD("frame = %d, buffer = %p, streamTypeMask = %d, stream format = %d",
5639 frameNumber, bufferInfo.buffer,
5640 channel->getStreamTypeMask(), bufferInfo.stream->format);
5641 }
5642 // Add this request packet into mPendingBuffersMap
5643 mPendingBuffersMap.mPendingBuffersInRequest.push_back(bufsForCurRequest);
5644 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
5645 mPendingBuffersMap.get_num_overall_buffers());
5646
5647 latestRequest = mPendingRequestsList.insert(
5648 mPendingRequestsList.end(), pendingRequest);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005649
5650 // Let shutter dispatcher and buffer dispatcher know shutter and output buffers are expected
5651 // for the frame number.
Chien-Yu Chena7f98612017-06-20 16:54:10 -07005652 mShutterDispatcher.expectShutter(frameNumber, request->input_buffer != nullptr);
Chien-Yu Chen3f303522017-05-19 15:21:45 -07005653 for (size_t i = 0; i < request->num_output_buffers; i++) {
5654 mOutputBufferDispatcher.expectBuffer(frameNumber, request->output_buffers[i].stream);
5655 }
5656
Thierry Strudel3d639192016-09-09 11:52:26 -07005657 if(mFlush) {
5658 LOGI("mFlush is true");
5659 pthread_mutex_unlock(&mMutex);
5660 return NO_ERROR;
5661 }
5662
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005663 // If this is not an HDR+ request, send the request to metadata and each output buffer's
5664 // channel.
5665 if (!hdrPlusRequest) {
5666 int indexUsed;
5667 // Notify metadata channel we receive a request
5668 mMetadataChannel->request(NULL, frameNumber, indexUsed);
Thierry Strudel3d639192016-09-09 11:52:26 -07005669
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005670 if(request->input_buffer != NULL){
5671 LOGD("Input request, frame_number %d", frameNumber);
5672 rc = setReprocParameters(request, &mReprocMeta, snapshotStreamId);
5673 if (NO_ERROR != rc) {
5674 LOGE("fail to set reproc parameters");
5675 pthread_mutex_unlock(&mMutex);
5676 return rc;
5677 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005678 }
5679
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005680 // Call request on other streams
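        // streams_need_metadata counts the output streams that need the HAL metadata
        // buffer for reprocessing; at most one such stream per request is supported
        // (see the streams_need_metadata > 1 check below).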
5681 uint32_t streams_need_metadata = 0;
5682 pendingBufferIterator pendingBufferIter = latestRequest->buffers.begin();
5683 for (size_t i = 0; i < request->num_output_buffers; i++) {
5684 const camera3_stream_buffer_t& output = request->output_buffers[i];
5685 QCamera3Channel *channel = (QCamera3Channel *)output.stream->priv;
5686
5687 if (channel == NULL) {
5688 LOGW("invalid channel pointer for stream");
5689 continue;
5690 }
5691
5692 if (output.stream->format == HAL_PIXEL_FORMAT_BLOB) {
5693 LOGD("snapshot request with output buffer %p, input buffer %p, frame_number %d",
5694 output.buffer, request->input_buffer, frameNumber);
5695 if(request->input_buffer != NULL){
Thierry Strudel3d639192016-09-09 11:52:26 -07005696 rc = channel->request(output.buffer, frameNumber,
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005697 pInputBuffer, &mReprocMeta, indexUsed, false, false);
5698 if (rc < 0) {
5699 LOGE("Fail to request on picture channel");
5700 pthread_mutex_unlock(&mMutex);
5701 return rc;
5702 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005703 } else {
Emilian Peev7650c122017-01-19 08:24:33 -08005704 if (HAL_DATASPACE_DEPTH == output.stream->data_space) {
5705 assert(NULL != mDepthChannel);
5706 assert(mDepthChannel == output.stream->priv);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005707
Emilian Peev7650c122017-01-19 08:24:33 -08005708 rc = mDepthChannel->mapBuffer(output.buffer, request->frame_number);
5709 if (rc < 0) {
5710 LOGE("Fail to map on depth buffer");
5711 pthread_mutex_unlock(&mMutex);
5712 return rc;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005713 }
Emilian Peev4e0fe952017-06-30 12:40:09 -07005714 continue;
Emilian Peev7650c122017-01-19 08:24:33 -08005715 } else {
5716 LOGD("snapshot request with buffer %p, frame_number %d",
5717 output.buffer, frameNumber);
5718 if (!request->settings) {
5719 rc = channel->request(output.buffer, frameNumber,
5720 NULL, mPrevParameters, indexUsed);
5721 } else {
5722 rc = channel->request(output.buffer, frameNumber,
5723 NULL, mParameters, indexUsed);
5724 }
5725 if (rc < 0) {
5726 LOGE("Fail to request on picture channel");
5727 pthread_mutex_unlock(&mMutex);
5728 return rc;
5729 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005730
Emilian Peev7650c122017-01-19 08:24:33 -08005731 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5732 uint32_t j = 0;
5733 for (j = 0; j < streamsArray.num_streams; j++) {
5734 if (streamsArray.stream_request[j].streamID == streamId) {
5735 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5736 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5737 else
5738 streamsArray.stream_request[j].buf_index = indexUsed;
5739 break;
5740 }
5741 }
5742 if (j == streamsArray.num_streams) {
5743 LOGE("Did not find matching stream to update index");
5744 assert(0);
5745 }
5746
5747 pendingBufferIter->need_metadata = true;
5748 streams_need_metadata++;
5749 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005750 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005751 } else if (output.stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888) {
5752 bool needMetadata = false;
5753 QCamera3YUVChannel *yuvChannel = (QCamera3YUVChannel *)channel;
5754 rc = yuvChannel->request(output.buffer, frameNumber,
5755 pInputBuffer, (pInputBuffer ? &mReprocMeta : mParameters),
5756 needMetadata, indexUsed, false, false);
Thierry Strudel3d639192016-09-09 11:52:26 -07005757 if (rc < 0) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005758 LOGE("Fail to request on YUV channel");
Thierry Strudel3d639192016-09-09 11:52:26 -07005759 pthread_mutex_unlock(&mMutex);
5760 return rc;
5761 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005762
5763 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5764 uint32_t j = 0;
5765 for (j = 0; j < streamsArray.num_streams; j++) {
5766 if (streamsArray.stream_request[j].streamID == streamId) {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005767 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5768 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5769 else
5770 streamsArray.stream_request[j].buf_index = indexUsed;
5771 break;
5772 }
5773 }
5774 if (j == streamsArray.num_streams) {
5775 LOGE("Did not find matching stream to update index");
5776 assert(0);
5777 }
5778
5779 pendingBufferIter->need_metadata = needMetadata;
5780 if (needMetadata)
5781 streams_need_metadata += 1;
5782 LOGD("calling YUV channel request, need_metadata is %d",
5783 needMetadata);
5784 } else {
5785 LOGD("request with buffer %p, frame_number %d",
5786 output.buffer, frameNumber);
5787
5788 rc = channel->request(output.buffer, frameNumber, indexUsed);
5789
5790 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5791 uint32_t j = 0;
5792 for (j = 0; j < streamsArray.num_streams; j++) {
5793 if (streamsArray.stream_request[j].streamID == streamId) {
5794 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5795 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5796 else
5797 streamsArray.stream_request[j].buf_index = indexUsed;
5798 break;
5799 }
5800 }
5801 if (j == streamsArray.num_streams) {
5802 LOGE("Did not find matching stream to update index");
5803 assert(0);
5804 }
5805
5806 if (((1U << CAM_STREAM_TYPE_VIDEO) == channel->getStreamTypeMask())
5807 && mBatchSize) {
5808 mToBeQueuedVidBufs++;
5809 if (mToBeQueuedVidBufs == mBatchSize) {
5810 channel->queueBatchBuf();
5811 }
5812 }
5813 if (rc < 0) {
5814 LOGE("request failed");
5815 pthread_mutex_unlock(&mMutex);
5816 return rc;
5817 }
5818 }
5819 pendingBufferIter++;
5820 }
5821
5822 for (auto itr = internallyRequestedStreams.begin(); itr != internallyRequestedStreams.end();
5823 itr++) {
5824 QCamera3Channel *channel = (QCamera3Channel *)(*itr).stream->priv;
5825
5826 if (channel == NULL) {
5827 LOGE("invalid channel pointer for stream");
5828 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005829 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005830 return BAD_VALUE;
5831 }
5832
5833 InternalRequest requestedStream;
5834 requestedStream = (*itr);
5835
5836
5837 if ((*itr).stream->format == HAL_PIXEL_FORMAT_BLOB) {
5838 LOGD("snapshot request internally input buffer %p, frame_number %d",
5839 request->input_buffer, frameNumber);
5840 if(request->input_buffer != NULL){
5841 rc = channel->request(NULL, frameNumber,
5842 pInputBuffer, &mReprocMeta, indexUsed, true,
5843 requestedStream.meteringOnly);
5844 if (rc < 0) {
5845 LOGE("Fail to request on picture channel");
5846 pthread_mutex_unlock(&mMutex);
5847 return rc;
5848 }
5849 } else {
5850 LOGD("snapshot request with frame_number %d", frameNumber);
5851 if (!request->settings) {
5852 rc = channel->request(NULL, frameNumber,
5853 NULL, mPrevParameters, indexUsed, true,
5854 requestedStream.meteringOnly);
5855 } else {
5856 rc = channel->request(NULL, frameNumber,
5857 NULL, mParameters, indexUsed, true, requestedStream.meteringOnly);
5858 }
5859 if (rc < 0) {
5860 LOGE("Fail to request on picture channel");
5861 pthread_mutex_unlock(&mMutex);
5862 return rc;
5863 }
5864
5865 if ((*itr).meteringOnly != 1) {
5866 requestedStream.need_metadata = 1;
5867 streams_need_metadata++;
5868 }
5869 }
5870
5871 uint32_t streamId = channel->getStreamID(channel->getStreamTypeMask());
5872 uint32_t j = 0;
5873 for (j = 0; j < streamsArray.num_streams; j++) {
5874 if (streamsArray.stream_request[j].streamID == streamId) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005875 if (mOpMode == CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE)
5876 streamsArray.stream_request[j].buf_index = CAM_FREERUN_IDX;
5877 else
5878 streamsArray.stream_request[j].buf_index = indexUsed;
5879 break;
5880 }
5881 }
5882 if (j == streamsArray.num_streams) {
5883 LOGE("Did not find matching stream to update index");
5884 assert(0);
5885 }
5886
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005887 } else {
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005888 LOGE("Internal requests not supported on this stream type");
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005889 assert(0);
Shuzhen Wang3a1b92d2017-08-09 13:39:47 -07005890 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005891 return INVALID_OPERATION;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005892 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005893 latestRequest->internalRequestList.push_back(requestedStream);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005894 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -08005895
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005896 //If 2 streams have need_metadata set to true, fail the request, unless
5897 //we copy/reference count the metadata buffer
5898 if (streams_need_metadata > 1) {
 5899            LOGE("not supporting request in which two streams require"
 5900                    " 2 HAL metadata buffers for reprocessing");
5901 pthread_mutex_unlock(&mMutex);
5902 return -EINVAL;
5903 }
Thierry Strudel3d639192016-09-09 11:52:26 -07005904
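    // Configure PDAF (phase-detection) data for this request: skip it by default when
    // a depth channel exists, disable it when there is none, and when a depth buffer
    // is actually requested honor the PD_DATA_ENABLE setting (or the last known
    // mDepthCloudMode).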
Emilian Peev656e4fa2017-06-02 16:47:04 +01005905 cam_sensor_pd_data_t pdafEnable = (nullptr != mDepthChannel) ?
5906 CAM_PD_DATA_SKIP : CAM_PD_DATA_DISABLED;
5907 if (depthRequestPresent && mDepthChannel) {
5908 if (request->settings) {
5909 camera_metadata_ro_entry entry;
5910 if (find_camera_metadata_ro_entry(request->settings,
5911 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE, &entry) == 0) {
5912 if (entry.data.u8[0]) {
5913 pdafEnable = CAM_PD_DATA_ENABLED;
5914 } else {
5915 pdafEnable = CAM_PD_DATA_SKIP;
5916 }
5917 mDepthCloudMode = pdafEnable;
5918 } else {
5919 pdafEnable = mDepthCloudMode;
5920 }
5921 } else {
5922 pdafEnable = mDepthCloudMode;
5923 }
5924 }
5925
Emilian Peev7650c122017-01-19 08:24:33 -08005926 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
5927 CAM_INTF_META_PDAF_DATA_ENABLE, pdafEnable)) {
5928 LOGE("%s: Failed to enable PDAF data in parameters!", __func__);
5929 pthread_mutex_unlock(&mMutex);
5930 return BAD_VALUE;
5931 }
Emilian Peev656e4fa2017-06-02 16:47:04 +01005932
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005933 if (request->input_buffer == NULL) {
5934 /* Set the parameters to backend:
5935 * - For every request in NORMAL MODE
5936 * - For every request in HFR mode during preview only case
5937 * - Once every batch in HFR mode during video recording
5938 */
5939 if (!mBatchSize ||
5940 (mBatchSize && !isVidBufRequested) ||
5941 (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize))) {
5942 LOGD("set_parms batchSz: %d IsVidBufReq: %d vidBufTobeQd: %d ",
5943 mBatchSize, isVidBufRequested,
5944 mToBeQueuedVidBufs);
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005945
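            // In HFR batch mode the backend is programmed once per batch: merge the
            // stream requests of every request in the batch into mBatchedStreamsArray
            // (skipping duplicates) and send the merged list with this batch's
            // set_parms call.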
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005946 if(mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs == mBatchSize)) {
5947 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5948 uint32_t m = 0;
5949 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5950 if (streamsArray.stream_request[k].streamID ==
5951 mBatchedStreamsArray.stream_request[m].streamID)
5952 break;
5953 }
5954 if (m == mBatchedStreamsArray.num_streams) {
5955 mBatchedStreamsArray.stream_request\
5956 [mBatchedStreamsArray.num_streams].streamID =
5957 streamsArray.stream_request[k].streamID;
5958 mBatchedStreamsArray.stream_request\
5959 [mBatchedStreamsArray.num_streams].buf_index =
5960 streamsArray.stream_request[k].buf_index;
5961 mBatchedStreamsArray.num_streams =
5962 mBatchedStreamsArray.num_streams + 1;
5963 }
5964 }
5965 streamsArray = mBatchedStreamsArray;
5966 }
5967 /* Update stream id of all the requested buffers */
5968 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID,
5969 streamsArray)) {
5970 LOGE("Failed to set stream type mask in the parameters");
Shuzhen Wang3c077d72017-04-20 22:48:59 -07005971 pthread_mutex_unlock(&mMutex);
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005972 return BAD_VALUE;
5973 }
5974
5975 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
5976 mParameters);
5977 if (rc < 0) {
5978 LOGE("set_parms failed");
5979 }
 5980            /* reset to zero because the batch is queued */
5981 mToBeQueuedVidBufs = 0;
5982 mPendingBatchMap.add(frameNumber, mFirstFrameNumberInBatch);
5983 memset(&mBatchedStreamsArray, 0, sizeof(cam_stream_ID_t));
5984 } else if (mBatchSize && isVidBufRequested && (mToBeQueuedVidBufs != mBatchSize)) {
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005985 for (uint32_t k = 0; k < streamsArray.num_streams; k++) {
5986 uint32_t m = 0;
5987 for (m = 0; m < mBatchedStreamsArray.num_streams; m++) {
5988 if (streamsArray.stream_request[k].streamID ==
5989 mBatchedStreamsArray.stream_request[m].streamID)
5990 break;
Chien-Yu Chene687bd02016-12-07 18:30:26 -08005991 }
5992 if (m == mBatchedStreamsArray.num_streams) {
5993 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5994 streamID = streamsArray.stream_request[k].streamID;
5995 mBatchedStreamsArray.stream_request[mBatchedStreamsArray.num_streams].
5996 buf_index = streamsArray.stream_request[k].buf_index;
5997 mBatchedStreamsArray.num_streams = mBatchedStreamsArray.num_streams + 1;
5998 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08005999 }
6000 }
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006001 mPendingLiveRequest++;
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006002
6003 // Start all streams after the first setting is sent, so that the
6004 // setting can be applied sooner: (0 + apply_delay)th frame.
6005 if (mState == CONFIGURED && mChannelHandle) {
6006 //Then start them.
6007 LOGH("Start META Channel");
6008 rc = mMetadataChannel->start();
6009 if (rc < 0) {
6010 LOGE("META channel start failed");
6011 pthread_mutex_unlock(&mMutex);
6012 return rc;
6013 }
6014
6015 if (mAnalysisChannel) {
6016 rc = mAnalysisChannel->start();
6017 if (rc < 0) {
6018 LOGE("Analysis channel start failed");
6019 mMetadataChannel->stop();
6020 pthread_mutex_unlock(&mMutex);
6021 return rc;
6022 }
6023 }
6024
6025 if (mSupportChannel) {
6026 rc = mSupportChannel->start();
6027 if (rc < 0) {
6028 LOGE("Support channel start failed");
6029 mMetadataChannel->stop();
 6030                    /* Although support and analysis are mutually exclusive today,
 6031                       adding it in any case for future-proofing */
6032 if (mAnalysisChannel) {
6033 mAnalysisChannel->stop();
6034 }
6035 pthread_mutex_unlock(&mMutex);
6036 return rc;
6037 }
6038 }
6039 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6040 it != mStreamInfo.end(); it++) {
6041 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
6042 LOGH("Start Processing Channel mask=%d",
6043 channel->getStreamTypeMask());
6044 rc = channel->start();
6045 if (rc < 0) {
6046 LOGE("channel start failed");
6047 pthread_mutex_unlock(&mMutex);
6048 return rc;
6049 }
6050 }
6051
6052 if (mRawDumpChannel) {
6053 LOGD("Starting raw dump stream");
6054 rc = mRawDumpChannel->start();
6055 if (rc != NO_ERROR) {
6056 LOGE("Error Starting Raw Dump Channel");
6057 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6058 it != mStreamInfo.end(); it++) {
6059 QCamera3Channel *channel =
6060 (QCamera3Channel *)(*it)->stream->priv;
6061 LOGH("Stopping Processing Channel mask=%d",
6062 channel->getStreamTypeMask());
6063 channel->stop();
6064 }
6065 if (mSupportChannel)
6066 mSupportChannel->stop();
6067 if (mAnalysisChannel) {
6068 mAnalysisChannel->stop();
6069 }
6070 mMetadataChannel->stop();
6071 pthread_mutex_unlock(&mMutex);
6072 return rc;
6073 }
6074 }
6075
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006076 // Configure modules for stream on.
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006077 rc = startChannelLocked();
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006078 if (rc != NO_ERROR) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006079 LOGE("startChannelLocked failed %d", rc);
Chien-Yu Chence5b8662017-05-09 17:17:17 -07006080 pthread_mutex_unlock(&mMutex);
6081 return rc;
6082 }
Shuzhen Wang3c077d72017-04-20 22:48:59 -07006083 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006084 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006085 }
6086
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006087 // Enable HDR+ mode for the first PREVIEW_INTENT request that doesn't disable HDR+.
Chenjie Luo4a761802017-06-13 17:35:54 +00006088 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -07006089 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chend77a5462017-06-02 18:00:38 -07006090 if (gEaselManagerClient != nullptr && gEaselManagerClient->isEaselPresentOnDevice() &&
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006091 !gEaselBypassOnly && !mFirstPreviewIntentSeen &&
6092 meta.exists(ANDROID_CONTROL_CAPTURE_INTENT) &&
6093 meta.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0] ==
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -07006094 ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW &&
6095 meta.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
6096 meta.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 0) {
Chien-Yu Chendeaebad2017-06-30 11:46:34 -07006097
6098 if (isSessionHdrPlusModeCompatible()) {
6099 rc = enableHdrPlusModeLocked();
6100 if (rc != OK) {
6101 LOGE("%s: Failed to open HDR+ asynchronously", __FUNCTION__);
6102 pthread_mutex_unlock(&mMutex);
6103 return rc;
6104 }
Chien-Yu Chenfe023bc2017-05-12 17:19:26 -07006105 }
6106
6107 mFirstPreviewIntentSeen = true;
6108 }
6109 }
6110
Thierry Strudel3d639192016-09-09 11:52:26 -07006111 LOGD("mPendingLiveRequest = %d", mPendingLiveRequest);
6112
6113 mState = STARTED;
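    // Throttle the request queue: block while mPendingLiveRequest is at or above
    // mMinInFlightRequests (with a timeout) before accepting the next request.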
 6114    // Use a timed condition wait
6115 struct timespec ts;
6116 uint8_t isValidTimeout = 1;
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006117 rc = clock_gettime(CLOCK_MONOTONIC, &ts);
Thierry Strudel3d639192016-09-09 11:52:26 -07006118 if (rc < 0) {
6119 isValidTimeout = 0;
6120 LOGE("Error reading the real time clock!!");
6121 }
6122 else {
 6123        // Use a 5 sec timeout for the request to be honored
Chien-Yu Chene687bd02016-12-07 18:30:26 -08006124 int64_t timeout = 5;
6125 {
6126 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
6127 // If there is a pending HDR+ request, the following requests may be blocked until the
6128 // HDR+ request is done. So allow a longer timeout.
6129 if (mHdrPlusPendingRequests.size() > 0) {
6130 timeout = MISSING_HDRPLUS_REQUEST_BUF_TIMEOUT;
6131 }
6132 }
6133 ts.tv_sec += timeout;
Thierry Strudel3d639192016-09-09 11:52:26 -07006134 }
6135 //Block on conditional variable
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006136 while ((mPendingLiveRequest >= mMinInFlightRequests) && !pInputBuffer &&
Thierry Strudel3d639192016-09-09 11:52:26 -07006137 (mState != ERROR) && (mState != DEINIT)) {
6138 if (!isValidTimeout) {
6139 LOGD("Blocking on conditional wait");
6140 pthread_cond_wait(&mRequestCond, &mMutex);
6141 }
6142 else {
6143 LOGD("Blocking on timed conditional wait");
6144 rc = pthread_cond_timedwait(&mRequestCond, &mMutex, &ts);
6145 if (rc == ETIMEDOUT) {
6146 rc = -ENODEV;
6147 LOGE("Unblocked on timeout!!!!");
6148 break;
6149 }
6150 }
6151 LOGD("Unblocked");
6152 if (mWokenUpByDaemon) {
6153 mWokenUpByDaemon = false;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006154 if (mPendingLiveRequest < mMaxInFlightRequests)
Thierry Strudel3d639192016-09-09 11:52:26 -07006155 break;
6156 }
6157 }
6158 pthread_mutex_unlock(&mMutex);
6159
6160 return rc;
6161}
6162
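/*===========================================================================
 * FUNCTION   : startChannelLocked
 *
 * DESCRIPTION: Configures the backend channel for stream on (without starting
 *              sensor streaming), starts the Easel MIPI interface when an
 *              Easel manager client is open, and then starts sensor streaming.
 *
 * PARAMETERS : None
 *
 * RETURN     : 0 on success
 *              Error code on failure
 *==========================================================================*/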
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006163int32_t QCamera3HardwareInterface::startChannelLocked()
6164{
6165 // Configure modules for stream on.
6166 int32_t rc = mCameraHandle->ops->start_channel(mCameraHandle->camera_handle,
6167 mChannelHandle, /*start_sensor_streaming*/false);
6168 if (rc != NO_ERROR) {
6169 LOGE("start_channel failed %d", rc);
6170 return rc;
6171 }
6172
6173 {
6174 // Configure Easel for stream on.
6175 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6176
6177 // Now that sensor mode should have been selected, get the selected sensor mode
6178 // info.
6179 memset(&mSensorModeInfo, 0, sizeof(mSensorModeInfo));
6180 getCurrentSensorModeInfo(mSensorModeInfo);
6181
6182 if (EaselManagerClientOpened) {
6183 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI");
6184 rc = gEaselManagerClient->startMipi(mCameraId, mSensorModeInfo.op_pixel_clk,
6185 /*enableCapture*/true);
6186 if (rc != OK) {
6187 ALOGE("%s: Failed to start MIPI rate for camera %u to %u", __FUNCTION__,
6188 mCameraId, mSensorModeInfo.op_pixel_clk);
6189 return rc;
6190 }
6191 logEaselEvent("EASEL_STARTUP_LATENCY", "Starting MIPI done");
6192 mEaselMipiStarted = true;
6193 }
6194 }
6195
6196 // Start sensor streaming.
6197 rc = mCameraHandle->ops->start_sensor_streaming(mCameraHandle->camera_handle,
6198 mChannelHandle);
6199 if (rc != NO_ERROR) {
6200 LOGE("start_sensor_stream_on failed %d", rc);
6201 return rc;
6202 }
6203
6204 return 0;
6205}
6206
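/*===========================================================================
 * FUNCTION   : stopChannelLocked
 *
 * DESCRIPTION: Stops the backend channel and, if the Easel MIPI interface was
 *              started, stops MIPI for this camera.
 *
 * PARAMETERS :
 *   @stopChannelImmediately : stop the channel immediately without waiting
 *                             for a frame boundary
 *
 * RETURN     : None
 *==========================================================================*/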
6207void QCamera3HardwareInterface::stopChannelLocked(bool stopChannelImmediately)
6208{
6209 mCameraHandle->ops->stop_channel(mCameraHandle->camera_handle,
6210 mChannelHandle, stopChannelImmediately);
6211
6212 {
6213 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6214 if (EaselManagerClientOpened && mEaselMipiStarted) {
6215 int32_t rc = gEaselManagerClient->stopMipi(mCameraId);
6216 if (rc != 0) {
6217 ALOGE("%s: Stopping MIPI failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
6218 }
6219 mEaselMipiStarted = false;
6220 }
6221 }
6222}
6223
Thierry Strudel3d639192016-09-09 11:52:26 -07006224/*===========================================================================
6225 * FUNCTION : dump
6226 *
 6227 * DESCRIPTION: Dump HAL state (pending requests, pending buffers and the
 6228 *              pending frame drop list) to the given file descriptor.
 6229 * PARAMETERS :
 6230 *   @fd : file descriptor to write the dump to
 6231 *
 6232 * RETURN : None
6233 *==========================================================================*/
6234void QCamera3HardwareInterface::dump(int fd)
6235{
6236 pthread_mutex_lock(&mMutex);
6237 dprintf(fd, "\n Camera HAL3 information Begin \n");
6238
6239 dprintf(fd, "\nNumber of pending requests: %zu \n",
6240 mPendingRequestsList.size());
6241 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6242 dprintf(fd, " Frame | Number of Buffers | Req Id: | Blob Req | Input buffer present\n");
6243 dprintf(fd, "-------+-------------------+-------------+----------+---------------------\n");
6244 for(pendingRequestIterator i = mPendingRequestsList.begin();
6245 i != mPendingRequestsList.end(); i++) {
6246 dprintf(fd, " %5d | %17d | %11d | %8d | %p \n",
6247 i->frame_number, i->num_buffers, i->request_id, i->blob_request,
6248 i->input_buffer);
6249 }
6250 dprintf(fd, "\nPending buffer map: Number of buffers: %u\n",
6251 mPendingBuffersMap.get_num_overall_buffers());
6252 dprintf(fd, "-------+------------------\n");
6253 dprintf(fd, " Frame | Stream type mask \n");
6254 dprintf(fd, "-------+------------------\n");
6255 for(auto &req : mPendingBuffersMap.mPendingBuffersInRequest) {
6256 for(auto &j : req.mPendingBufferList) {
6257 QCamera3Channel *channel = (QCamera3Channel *)(j.stream->priv);
6258 dprintf(fd, " %5d | %11d \n",
6259 req.frame_number, channel->getStreamTypeMask());
6260 }
6261 }
6262 dprintf(fd, "-------+------------------\n");
6263
6264 dprintf(fd, "\nPending frame drop list: %zu\n",
6265 mPendingFrameDropList.size());
6266 dprintf(fd, "-------+-----------\n");
6267 dprintf(fd, " Frame | Stream ID \n");
6268 dprintf(fd, "-------+-----------\n");
6269 for(List<PendingFrameDropInfo>::iterator i = mPendingFrameDropList.begin();
6270 i != mPendingFrameDropList.end(); i++) {
6271 dprintf(fd, " %5d | %9d \n",
6272 i->frame_number, i->stream_ID);
6273 }
6274 dprintf(fd, "-------+-----------\n");
6275
6276 dprintf(fd, "\n Camera HAL3 information End \n");
6277
6278 /* use dumpsys media.camera as trigger to send update debug level event */
6279 mUpdateDebugLevel = true;
6280 pthread_mutex_unlock(&mMutex);
6281 return;
6282}
6283
6284/*===========================================================================
6285 * FUNCTION : flush
6286 *
6287 * DESCRIPTION: Calls stopAllChannels, notifyErrorForPendingRequests and
6288 * conditionally restarts channels
6289 *
6290 * PARAMETERS :
6291 * @ restartChannels: re-start all channels
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006292 * @ stopChannelImmediately: stop the channel immediately. This should be used
 6293 *                           when the device has encountered an error and MIPI
 6294 *                           may have been stopped.
Thierry Strudel3d639192016-09-09 11:52:26 -07006295 *
6296 * RETURN :
6297 * 0 on success
6298 * Error code on failure
6299 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006300int QCamera3HardwareInterface::flush(bool restartChannels, bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006301{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006302 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006303 int32_t rc = NO_ERROR;
6304
6305 LOGD("Unblocking Process Capture Request");
6306 pthread_mutex_lock(&mMutex);
6307 mFlush = true;
6308 pthread_mutex_unlock(&mMutex);
6309
Chien-Yu Chen11c8edc2017-09-11 20:54:24 -07006310    // Finish any in-progress HDR+ client opening, then disable HDR+ mode if it is enabled.
6311 {
6312 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
6313 finishHdrPlusClientOpeningLocked(l);
6314 disableHdrPlusModeLocked();
6315 }
6316
Thierry Strudel3d639192016-09-09 11:52:26 -07006317 rc = stopAllChannels();
6318 // unlink of dualcam
6319 if (mIsDeviceLinked) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006320 cam_dual_camera_bundle_info_t *m_pRelCamSyncBuf =
6321 &m_pDualCamCmdPtr->bundle_info;
6322 m_pDualCamCmdPtr->cmd_type = CAM_DUAL_CAMERA_BUNDLE_INFO;
Thierry Strudel3d639192016-09-09 11:52:26 -07006323 m_pRelCamSyncBuf->sync_control = CAM_SYNC_RELATED_SENSORS_OFF;
6324 pthread_mutex_lock(&gCamLock);
6325
6326 if (mIsMainCamera == 1) {
6327 m_pRelCamSyncBuf->mode = CAM_MODE_PRIMARY;
6328 m_pRelCamSyncBuf->type = CAM_TYPE_MAIN;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006329 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006330 // related session id should be session id of linked session
6331 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6332 } else {
6333 m_pRelCamSyncBuf->mode = CAM_MODE_SECONDARY;
6334 m_pRelCamSyncBuf->type = CAM_TYPE_AUX;
Thierry Strudel269c81a2016-10-12 12:13:59 -07006335 m_pRelCamSyncBuf->sync_3a_mode = CAM_3A_SYNC_FOLLOW;
Thierry Strudel3d639192016-09-09 11:52:26 -07006336 m_pRelCamSyncBuf->related_sensor_session_id = sessionId[mLinkedCameraId];
6337 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006338 m_pRelCamSyncBuf->is_hw_sync_enabled = DUALCAM_HW_SYNC_ENABLED;
Thierry Strudel3d639192016-09-09 11:52:26 -07006339 pthread_mutex_unlock(&gCamLock);
6340
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006341 rc = mCameraHandle->ops->set_dual_cam_cmd(
6342 mCameraHandle->camera_handle);
Thierry Strudel3d639192016-09-09 11:52:26 -07006343 if (rc < 0) {
6344 LOGE("Dualcam: Unlink failed, but still proceed to close");
6345 }
6346 }
6347
6348 if (rc < 0) {
6349 LOGE("stopAllChannels failed");
6350 return rc;
6351 }
6352 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006353 stopChannelLocked(stopChannelImmediately);
Thierry Strudel3d639192016-09-09 11:52:26 -07006354 }
6355
6356 // Reset bundle info
6357 rc = setBundleInfo();
6358 if (rc < 0) {
6359 LOGE("setBundleInfo failed %d", rc);
6360 return rc;
6361 }
6362
6363 // Mutex Lock
6364 pthread_mutex_lock(&mMutex);
6365
6366 // Unblock process_capture_request
6367 mPendingLiveRequest = 0;
6368 pthread_cond_signal(&mRequestCond);
6369
6370 rc = notifyErrorForPendingRequests();
6371 if (rc < 0) {
6372 LOGE("notifyErrorForPendingRequests failed");
6373 pthread_mutex_unlock(&mMutex);
6374 return rc;
6375 }
6376
6377 mFlush = false;
6378
6379 // Start the Streams/Channels
6380 if (restartChannels) {
6381 rc = startAllChannels();
6382 if (rc < 0) {
6383 LOGE("startAllChannels failed");
6384 pthread_mutex_unlock(&mMutex);
6385 return rc;
6386 }
Thierry Strudel2896d122017-02-23 19:18:03 -08006387 if (mChannelHandle) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006388 // Configure modules for stream on.
6389 rc = startChannelLocked();
Thierry Strudel2896d122017-02-23 19:18:03 -08006390 if (rc < 0) {
Chien-Yu Chen153c5172017-09-08 11:33:19 -07006391 LOGE("startChannelLocked failed");
Thierry Strudel2896d122017-02-23 19:18:03 -08006392 pthread_mutex_unlock(&mMutex);
6393 return rc;
6394 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006395 }
6396 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006397 pthread_mutex_unlock(&mMutex);
6398
6399 return 0;
6400}
6401
6402/*===========================================================================
6403 * FUNCTION : flushPerf
6404 *
 6405 * DESCRIPTION: Performance-optimized version of flush that does not stream off;
 6406 *              instead it flushes the backend and waits for pending buffers to return
6407 *
6408 * PARAMETERS :
6409 *
6410 *
6411 * RETURN : 0 : success
6412 * -EINVAL: input is malformed (device is not valid)
6413 * -ENODEV: if the device has encountered a serious error
6414 *==========================================================================*/
6415int QCamera3HardwareInterface::flushPerf()
6416{
Thierry Strudel9ec39c62016-12-28 11:30:05 -08006417 KPI_ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_STOP_PREVIEW);
Thierry Strudel3d639192016-09-09 11:52:26 -07006418 int32_t rc = 0;
6419 struct timespec timeout;
6420 bool timed_wait = false;
6421
6422 pthread_mutex_lock(&mMutex);
6423 mFlushPerf = true;
6424 mPendingBuffersMap.numPendingBufsAtFlush =
6425 mPendingBuffersMap.get_num_overall_buffers();
6426 LOGD("Calling flush. Wait for %d buffers to return",
6427 mPendingBuffersMap.numPendingBufsAtFlush);
6428
6429 /* send the flush event to the backend */
6430 rc = mCameraHandle->ops->flush(mCameraHandle->camera_handle);
6431 if (rc < 0) {
6432 LOGE("Error in flush: IOCTL failure");
6433 mFlushPerf = false;
6434 pthread_mutex_unlock(&mMutex);
6435 return -ENODEV;
6436 }
6437
6438 if (mPendingBuffersMap.numPendingBufsAtFlush == 0) {
6439 LOGD("No pending buffers in HAL, return flush");
6440 mFlushPerf = false;
6441 pthread_mutex_unlock(&mMutex);
6442 return rc;
6443 }
6444
6445 /* wait on a signal that buffers were received */
Shuzhen Wangfb961e52016-11-28 11:48:02 -08006446 rc = clock_gettime(CLOCK_MONOTONIC, &timeout);
Thierry Strudel3d639192016-09-09 11:52:26 -07006447 if (rc < 0) {
6448            LOGE("Error reading the monotonic clock, cannot use timed wait");
6449 } else {
6450 timeout.tv_sec += FLUSH_TIMEOUT;
6451 timed_wait = true;
6452 }
6453
6454 //Block on conditional variable
6455 while (mPendingBuffersMap.numPendingBufsAtFlush != 0) {
6456 LOGD("Waiting on mBuffersCond");
6457 if (!timed_wait) {
6458 rc = pthread_cond_wait(&mBuffersCond, &mMutex);
6459 if (rc != 0) {
6460 LOGE("pthread_cond_wait failed due to rc = %s",
6461 strerror(rc));
6462 break;
6463 }
6464 } else {
6465 rc = pthread_cond_timedwait(&mBuffersCond, &mMutex, &timeout);
6466 if (rc != 0) {
6467 LOGE("pthread_cond_timedwait failed due to rc = %s",
6468 strerror(rc));
6469 break;
6470 }
6471 }
6472 }
6473 if (rc != 0) {
6474 mFlushPerf = false;
6475 pthread_mutex_unlock(&mMutex);
6476 return -ENODEV;
6477 }
6478
6479 LOGD("Received buffers, now safe to return them");
6480
6481 //make sure the channels handle flush
6482 //currently only required for the picture channel to release snapshot resources
6483 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6484 it != mStreamInfo.end(); it++) {
6485 QCamera3Channel *channel = (*it)->channel;
6486 if (channel) {
6487 rc = channel->flush();
6488 if (rc) {
6489 LOGE("Flushing the channels failed with error %d", rc);
6490                // Even though the channel flush failed, we need to continue and
6491                // return the buffers we hold to the framework; however, the return
6492                // value will be an error
6493 rc = -ENODEV;
6494 }
6495 }
6496 }
6497
6498 /* notify the frameworks and send errored results */
6499 rc = notifyErrorForPendingRequests();
6500 if (rc < 0) {
6501 LOGE("notifyErrorForPendingRequests failed");
6502 pthread_mutex_unlock(&mMutex);
6503 return rc;
6504 }
6505
6506 //unblock process_capture_request
6507 mPendingLiveRequest = 0;
6508 unblockRequestIfNecessary();
6509
6510 mFlushPerf = false;
6511 pthread_mutex_unlock(&mMutex);
6512 LOGD ("Flush Operation complete. rc = %d", rc);
6513 return rc;
6514}
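/*===========================================================================
 * Illustrative sketch (not referenced by the HAL): the bounded wait in
 * flushPerf() pairs clock_gettime(CLOCK_MONOTONIC) with
 * pthread_cond_timedwait(), which only behaves as intended when the condition
 * variable itself was initialized for CLOCK_MONOTONIC (in this HAL that setup
 * is assumed to be provided by cam_cond.h). The helpers below use hypothetical
 * names and show the generic pattern; they rely on <pthread.h> and <time.h>,
 * which this file already depends on.
 *==========================================================================*/
static void exampleInitMonotonicCond(pthread_cond_t *cond)
{
    pthread_condattr_t attr;
    pthread_condattr_init(&attr);
    // Make pthread_cond_timedwait() measure its deadline on CLOCK_MONOTONIC,
    // so the wait is immune to wall-clock adjustments.
    pthread_condattr_setclock(&attr, CLOCK_MONOTONIC);
    pthread_cond_init(cond, &attr);
    pthread_condattr_destroy(&attr);
}

static int exampleTimedDrain(pthread_mutex_t *lock, pthread_cond_t *cond,
        volatile uint32_t *pendingCount, long timeoutSec)
{
    struct timespec deadline;
    if (clock_gettime(CLOCK_MONOTONIC, &deadline) < 0) {
        return -1;  // no deadline available; a caller could fall back to an untimed wait
    }
    deadline.tv_sec += timeoutSec;

    int rc = 0;
    pthread_mutex_lock(lock);
    while ((*pendingCount != 0) && (rc == 0)) {
        // Atomically releases the mutex while waiting and re-acquires it before
        // returning; rc becomes ETIMEDOUT once the deadline expires.
        rc = pthread_cond_timedwait(cond, lock, &deadline);
    }
    pthread_mutex_unlock(lock);
    return rc;
}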
6515
6516/*===========================================================================
6517 * FUNCTION : handleCameraDeviceError
6518 *
6519 * DESCRIPTION: This function calls internal flush and notifies the error to
6520 * the framework, and updates the state variable.
6521 *
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006522 * PARAMETERS :
6523 * @stopChannelImmediately : stop channels immediately without waiting for
6524 * frame boundary.
Thierry Strudel3d639192016-09-09 11:52:26 -07006525 *
6526 * RETURN : NO_ERROR on Success
6527 * Error code on failure
6528 *==========================================================================*/
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006529int32_t QCamera3HardwareInterface::handleCameraDeviceError(bool stopChannelImmediately)
Thierry Strudel3d639192016-09-09 11:52:26 -07006530{
6531 int32_t rc = NO_ERROR;
6532
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006533 {
6534 Mutex::Autolock lock(mFlushLock);
6535 pthread_mutex_lock(&mMutex);
6536 if (mState != ERROR) {
6537 //if mState != ERROR, nothing to be done
6538 pthread_mutex_unlock(&mMutex);
6539 return NO_ERROR;
6540 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006541 pthread_mutex_unlock(&mMutex);
Thierry Strudel3d639192016-09-09 11:52:26 -07006542
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -07006543 rc = flush(false /* restart channels */, stopChannelImmediately);
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006544 if (NO_ERROR != rc) {
6545 LOGE("internal flush to handle mState = ERROR failed");
6546 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006547
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006548 pthread_mutex_lock(&mMutex);
6549 mState = DEINIT;
6550 pthread_mutex_unlock(&mMutex);
6551 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006552
6553 camera3_notify_msg_t notify_msg;
6554 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
6555 notify_msg.type = CAMERA3_MSG_ERROR;
6556 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_DEVICE;
6557 notify_msg.message.error.error_stream = NULL;
6558 notify_msg.message.error.frame_number = 0;
Thierry Strudele80ad7c2016-12-06 10:16:27 -08006559 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -07006560
6561 return rc;
6562}
6563
6564/*===========================================================================
6565 * FUNCTION : captureResultCb
6566 *
6567 * DESCRIPTION: Callback handler for all capture results
6568 * (stream buffers as well as metadata)
6569 *
6570 * PARAMETERS :
6571 * @metadata : metadata information
6572 * @buffer : actual gralloc buffer to be returned to frameworks.
6573 * NULL if metadata.
6574 *
6575 * RETURN : NONE
6576 *==========================================================================*/
6577void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata_buf,
6578 camera3_stream_buffer_t *buffer, uint32_t frame_number, bool isInputBuffer)
6579{
6580 if (metadata_buf) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -07006581 pthread_mutex_lock(&mMutex);
6582 uint8_t batchSize = mBatchSize;
6583 pthread_mutex_unlock(&mMutex);
6584 if (batchSize) {
Thierry Strudel3d639192016-09-09 11:52:26 -07006585 handleBatchMetadata(metadata_buf,
6586 true /* free_and_bufdone_meta_buf */);
6587 } else { /* mBatchSize = 0 */
6588 hdrPlusPerfLock(metadata_buf);
6589 pthread_mutex_lock(&mMutex);
6590 handleMetadataWithLock(metadata_buf,
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006591 true /* free_and_bufdone_meta_buf */,
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006592 true /* last urgent frame of batch metadata */,
6593 true /* last frame of batch metadata */,
Thierry Strudel54dc9782017-02-15 12:12:10 -08006594 NULL);
Thierry Strudel3d639192016-09-09 11:52:26 -07006595 pthread_mutex_unlock(&mMutex);
6596 }
6597 } else if (isInputBuffer) {
6598 pthread_mutex_lock(&mMutex);
6599 handleInputBufferWithLock(frame_number);
6600 pthread_mutex_unlock(&mMutex);
6601 } else {
6602 pthread_mutex_lock(&mMutex);
6603 handleBufferWithLock(buffer, frame_number);
6604 pthread_mutex_unlock(&mMutex);
6605 }
6606 return;
6607}
6608
6609/*===========================================================================
6610 * FUNCTION : getReprocessibleOutputStreamId
6611 *
6612 * DESCRIPTION: Get source output stream id for the input reprocess stream
6613 * based on size and format, which would be the largest
6614 * output stream if an input stream exists.
6615 *
6616 * PARAMETERS :
6617 * @id : return the stream id if found
6618 *
6619 * RETURN : int32_t type of status
6620 * NO_ERROR -- success
6621 * non-zero failure code
6622 *==========================================================================*/
6623int32_t QCamera3HardwareInterface::getReprocessibleOutputStreamId(uint32_t &id)
6624{
6625 /* Check if there is any output or bidirectional stream with the same size
6626 and format as the input stream, and return that stream */
6627 if ((mInputStreamInfo.dim.width > 0) &&
6628 (mInputStreamInfo.dim.height > 0)) {
6629 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
6630 it != mStreamInfo.end(); it++) {
6631
6632 camera3_stream_t *stream = (*it)->stream;
6633 if ((stream->width == (uint32_t)mInputStreamInfo.dim.width) &&
6634 (stream->height == (uint32_t)mInputStreamInfo.dim.height) &&
6635 (stream->format == mInputStreamInfo.format)) {
6636 // Usage flag for an input stream and the source output stream
6637 // may be different.
6638 LOGD("Found reprocessible output stream! %p", *it);
6639 LOGD("input stream usage 0x%x, current stream usage 0x%x",
6640 stream->usage, mInputStreamInfo.usage);
6641
6642 QCamera3Channel *channel = (QCamera3Channel *)stream->priv;
6643 if (channel != NULL && channel->mStreams[0]) {
6644 id = channel->mStreams[0]->getMyServerID();
6645 return NO_ERROR;
6646 }
6647 }
6648 }
6649 } else {
6650 LOGD("No input stream, so no reprocessible output stream");
6651 }
6652 return NAME_NOT_FOUND;
6653}
6654
6655/*===========================================================================
6656 * FUNCTION : lookupFwkName
6657 *
6658 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6659 * make sure the parameter is correctly propagated
6660 *
6661 * PARAMETERS :
6662 * @arr : map between the two enums
6663 * @len : len of the map
6664 * @hal_name : name of the hal_parm to map
6665 *
6666 * RETURN : int type of status
6667 * fwk_name -- success
6668 * non-zero failure code
6669 *==========================================================================*/
6670template <typename halType, class mapType> int lookupFwkName(const mapType *arr,
6671 size_t len, halType hal_name)
6672{
6673
6674 for (size_t i = 0; i < len; i++) {
6675 if (arr[i].hal_name == hal_name) {
6676 return arr[i].fwk_name;
6677 }
6678 }
6679
6680 /* Not being able to find a matching framework type is not necessarily
6681 * an error. This happens when mm-camera supports more attributes
6682 * than the framework does */
6683 LOGH("Cannot find matching framework type");
6684 return NAME_NOT_FOUND;
6685}
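/*===========================================================================
 * Illustrative sketch (not referenced by the HAL): the mapping tables passed
 * to lookupFwkName()/lookupHalName() are arrays of structs carrying a
 * framework value and a HAL value, so translation in either direction is a
 * linear scan keyed on the side that is known. The map type, table and helper
 * below are hypothetical stand-ins for the real tables (e.g. SCENE_MODES_MAP).
 *==========================================================================*/
typedef struct {
    int fwk_name;   // value as defined by the camera framework
    int hal_name;   // value as defined by the HAL/backend
} ExampleEnumMap;

static const ExampleEnumMap EXAMPLE_MODES_MAP[] = {
    { 0 /* fwk OFF  */, 10 /* hal OFF  */ },
    { 1 /* fwk AUTO */, 11 /* hal AUTO */ },
};

// Translate a backend value to its framework counterpart, tolerating modes
// that the framework does not define (lookupFwkName returns NAME_NOT_FOUND).
static int exampleHalToFwk(int halValue)
{
    int val = lookupFwkName(EXAMPLE_MODES_MAP,
            METADATA_MAP_SIZE(EXAMPLE_MODES_MAP), halValue);
    return (NAME_NOT_FOUND != val) ? val : -1;
}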
6686
6687/*===========================================================================
6688 * FUNCTION : lookupHalName
6689 *
6690 * DESCRIPTION: In case the enum is not the same in the fwk and the backend,
6691 * make sure the parameter is correctly propagated
6692 *
6693 * PARAMETERS :
6694 * @arr : map between the two enums
6695 * @len : len of the map
6696 * @fwk_name : name of the fwk parameter to map
6697 *
6698 * RETURN : int32_t type of status
6699 * hal_name -- success
6700 * non-zero failure code
6701 *==========================================================================*/
6702template <typename fwkType, class mapType> int lookupHalName(const mapType *arr,
6703 size_t len, fwkType fwk_name)
6704{
6705 for (size_t i = 0; i < len; i++) {
6706 if (arr[i].fwk_name == fwk_name) {
6707 return arr[i].hal_name;
6708 }
6709 }
6710
6711 LOGE("Cannot find matching hal type fwk_name=%d", fwk_name);
6712 return NAME_NOT_FOUND;
6713}
6714
6715/*===========================================================================
6716 * FUNCTION : lookupProp
6717 *
6718 * DESCRIPTION: lookup a value by its name
6719 *
6720 * PARAMETERS :
6721 * @arr : map between the two enums
6722 * @len : size of the map
6723 * @name : name to be looked up
6724 *
6725 * RETURN : Value if found
6726 * CAM_CDS_MODE_MAX if not found
6727 *==========================================================================*/
6728template <class mapType> cam_cds_mode_type_t lookupProp(const mapType *arr,
6729 size_t len, const char *name)
6730{
6731 if (name) {
6732 for (size_t i = 0; i < len; i++) {
6733 if (!strcmp(arr[i].desc, name)) {
6734 return arr[i].val;
6735 }
6736 }
6737 }
6738 return CAM_CDS_MODE_MAX;
6739}
6740
6741/*===========================================================================
6742 * FUNCTION : translateFromHalMetadata
6743 *
6744 * DESCRIPTION: Translate metadata from the HAL/backend format into the
 * camera_metadata_t format returned to the framework
 *
6745 * PARAMETERS :
6746 * @metadata : metadata information from callback
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006747 * @pendingRequest: pending request for this metadata
Thierry Strudel3d639192016-09-09 11:52:26 -07006748 * @pprocDone: whether internal offline postprocessing is done
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006749 * @lastMetadataInBatch: Boolean to indicate whether this is the last metadata
6750 * in a batch. Always true for non-batch mode.
 * @enableZsl : requested ZSL enable setting, if specified for this request
Thierry Strudel3d639192016-09-09 11:52:26 -07006751 *
6752 * RETURN : camera_metadata_t*
6753 * metadata in a format specified by fwk
6754 *==========================================================================*/
6755camera_metadata_t*
6756QCamera3HardwareInterface::translateFromHalMetadata(
6757 metadata_buffer_t *metadata,
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006758 const PendingRequestInfo& pendingRequest,
Thierry Strudel3d639192016-09-09 11:52:26 -07006759 bool pprocDone,
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07006760 bool lastMetadataInBatch,
6761 const bool *enableZsl)
Thierry Strudel3d639192016-09-09 11:52:26 -07006762{
6763 CameraMetadata camMetadata;
6764 camera_metadata_t *resultMetadata;
6765
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006766 if (!lastMetadataInBatch) {
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006767 /* In batch mode, only populate SENSOR_TIMESTAMP if this is not the last in batch.
6768 * Timestamp is needed because it's used for shutter notify calculation.
6769 * */
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006770 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
Shuzhen Wang8fe62792017-03-20 16:10:25 -07006771 resultMetadata = camMetadata.release();
Shuzhen Wang94ddf072017-03-12 19:47:23 -07006772 return resultMetadata;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07006773 }
6774
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006775 if (pendingRequest.jpegMetadata.entryCount())
6776 camMetadata.append(pendingRequest.jpegMetadata);
Thierry Strudel3d639192016-09-09 11:52:26 -07006777
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006778 camMetadata.update(ANDROID_SENSOR_TIMESTAMP, &pendingRequest.timestamp, 1);
6779 camMetadata.update(ANDROID_REQUEST_ID, &pendingRequest.request_id, 1);
6780 camMetadata.update(ANDROID_REQUEST_PIPELINE_DEPTH, &pendingRequest.pipeline_depth, 1);
6781 camMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &pendingRequest.capture_intent, 1);
6782 camMetadata.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &pendingRequest.hybrid_ae_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006783 if (mBatchSize == 0) {
6784 // DevCamDebug metadata translateFromHalMetadata. Only update this one for non-HFR mode
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006785 camMetadata.update(DEVCAMDEBUG_META_ENABLE, &pendingRequest.DevCamDebug_meta_enable, 1);
Samuel Ha68ba5172016-12-15 18:41:12 -08006786 }
Thierry Strudel3d639192016-09-09 11:52:26 -07006787
Samuel Ha68ba5172016-12-15 18:41:12 -08006788 // atrace_begin(ATRACE_TAG_ALWAYS, "DevCamDebugInfo");
6789 // Only update DevCamDebug metadata conditionally: non-HFR mode and it is enabled.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07006790 if (mBatchSize == 0 && pendingRequest.DevCamDebug_meta_enable != 0) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006791 // DevCamDebug metadata translateFromHalMetadata AF
6792 IF_META_AVAILABLE(int32_t, DevCamDebug_af_lens_position,
6793 CAM_INTF_META_DEV_CAM_AF_LENS_POSITION, metadata) {
6794 int32_t fwk_DevCamDebug_af_lens_position = *DevCamDebug_af_lens_position;
6795 camMetadata.update(DEVCAMDEBUG_AF_LENS_POSITION, &fwk_DevCamDebug_af_lens_position, 1);
6796 }
6797 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_confidence,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006798 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006799 int32_t fwk_DevCamDebug_af_tof_confidence = *DevCamDebug_af_tof_confidence;
6800 camMetadata.update(DEVCAMDEBUG_AF_TOF_CONFIDENCE, &fwk_DevCamDebug_af_tof_confidence, 1);
6801 }
6802 IF_META_AVAILABLE(int32_t, DevCamDebug_af_tof_distance,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07006803 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
Samuel Ha68ba5172016-12-15 18:41:12 -08006804 int32_t fwk_DevCamDebug_af_tof_distance = *DevCamDebug_af_tof_distance;
6805 camMetadata.update(DEVCAMDEBUG_AF_TOF_DISTANCE, &fwk_DevCamDebug_af_tof_distance, 1);
6806 }
6807 IF_META_AVAILABLE(int32_t, DevCamDebug_af_luma,
6808 CAM_INTF_META_DEV_CAM_AF_LUMA, metadata) {
6809 int32_t fwk_DevCamDebug_af_luma = *DevCamDebug_af_luma;
6810 camMetadata.update(DEVCAMDEBUG_AF_LUMA, &fwk_DevCamDebug_af_luma, 1);
6811 }
6812 IF_META_AVAILABLE(int32_t, DevCamDebug_af_haf_state,
6813 CAM_INTF_META_DEV_CAM_AF_HAF_STATE, metadata) {
6814 int32_t fwk_DevCamDebug_af_haf_state = *DevCamDebug_af_haf_state;
6815 camMetadata.update(DEVCAMDEBUG_AF_HAF_STATE, &fwk_DevCamDebug_af_haf_state, 1);
6816 }
6817 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_target_pos,
6818 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_TARGET_POS, metadata) {
6819 int32_t fwk_DevCamDebug_af_monitor_pdaf_target_pos =
6820 *DevCamDebug_af_monitor_pdaf_target_pos;
6821 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
6822 &fwk_DevCamDebug_af_monitor_pdaf_target_pos, 1);
6823 }
6824 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_confidence,
6825 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_CONFIDENCE, metadata) {
6826 int32_t fwk_DevCamDebug_af_monitor_pdaf_confidence =
6827 *DevCamDebug_af_monitor_pdaf_confidence;
6828 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
6829 &fwk_DevCamDebug_af_monitor_pdaf_confidence, 1);
6830 }
6831 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_pdaf_refocus,
6832 CAM_INTF_META_DEV_CAM_AF_MONITOR_PDAF_REFOCUS, metadata) {
6833 int32_t fwk_DevCamDebug_af_monitor_pdaf_refocus = *DevCamDebug_af_monitor_pdaf_refocus;
6834 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
6835 &fwk_DevCamDebug_af_monitor_pdaf_refocus, 1);
6836 }
6837 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_target_pos,
6838 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_TARGET_POS, metadata) {
6839 int32_t fwk_DevCamDebug_af_monitor_tof_target_pos =
6840 *DevCamDebug_af_monitor_tof_target_pos;
6841 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
6842 &fwk_DevCamDebug_af_monitor_tof_target_pos, 1);
6843 }
6844 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_confidence,
6845 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_CONFIDENCE, metadata) {
6846 int32_t fwk_DevCamDebug_af_monitor_tof_confidence =
6847 *DevCamDebug_af_monitor_tof_confidence;
6848 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
6849 &fwk_DevCamDebug_af_monitor_tof_confidence, 1);
6850 }
6851 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_tof_refocus,
6852 CAM_INTF_META_DEV_CAM_AF_MONITOR_TOF_REFOCUS, metadata) {
6853 int32_t fwk_DevCamDebug_af_monitor_tof_refocus = *DevCamDebug_af_monitor_tof_refocus;
6854 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
6855 &fwk_DevCamDebug_af_monitor_tof_refocus, 1);
6856 }
6857 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_type_select,
6858 CAM_INTF_META_DEV_CAM_AF_MONITOR_TYPE_SELECT, metadata) {
6859 int32_t fwk_DevCamDebug_af_monitor_type_select = *DevCamDebug_af_monitor_type_select;
6860 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
6861 &fwk_DevCamDebug_af_monitor_type_select, 1);
6862 }
6863 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_refocus,
6864 CAM_INTF_META_DEV_CAM_AF_MONITOR_REFOCUS, metadata) {
6865 int32_t fwk_DevCamDebug_af_monitor_refocus = *DevCamDebug_af_monitor_refocus;
6866 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_REFOCUS,
6867 &fwk_DevCamDebug_af_monitor_refocus, 1);
6868 }
6869 IF_META_AVAILABLE(int32_t, DevCamDebug_af_monitor_target_pos,
6870 CAM_INTF_META_DEV_CAM_AF_MONITOR_TARGET_POS, metadata) {
6871 int32_t fwk_DevCamDebug_af_monitor_target_pos = *DevCamDebug_af_monitor_target_pos;
6872 camMetadata.update(DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
6873 &fwk_DevCamDebug_af_monitor_target_pos, 1);
6874 }
6875 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_target_pos,
6876 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_TARGET_POS, metadata) {
6877 int32_t fwk_DevCamDebug_af_search_pdaf_target_pos =
6878 *DevCamDebug_af_search_pdaf_target_pos;
6879 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
6880 &fwk_DevCamDebug_af_search_pdaf_target_pos, 1);
6881 }
6882 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_next_pos,
6883 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEXT_POS, metadata) {
6884 int32_t fwk_DevCamDebug_af_search_pdaf_next_pos = *DevCamDebug_af_search_pdaf_next_pos;
6885 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
6886 &fwk_DevCamDebug_af_search_pdaf_next_pos, 1);
6887 }
6888 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_near_pos,
6889 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_NEAR_POS, metadata) {
6890 int32_t fwk_DevCamDebug_af_search_pdaf_near_pos = *DevCamDebug_af_search_pdaf_near_pos;
6891 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
6892 &fwk_DevCamDebug_af_search_pdaf_near_pos, 1);
6893 }
6894 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_far_pos,
6895 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_FAR_POS, metadata) {
6896 int32_t fwk_DevCamDebug_af_search_pdaf_far_pos = *DevCamDebug_af_search_pdaf_far_pos;
6897 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
6898 &fwk_DevCamDebug_af_search_pdaf_far_pos, 1);
6899 }
6900 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_pdaf_confidence,
6901 CAM_INTF_META_DEV_CAM_AF_SEARCH_PDAF_CONFIDENCE, metadata) {
6902 int32_t fwk_DevCamDebug_af_search_pdaf_confidence = *DevCamDebug_af_search_pdaf_confidence;
6903 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
6904 &fwk_DevCamDebug_af_search_pdaf_confidence, 1);
6905 }
6906 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_target_pos,
6907 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_TARGET_POS, metadata) {
6908 int32_t fwk_DevCamDebug_af_search_tof_target_pos =
6909 *DevCamDebug_af_search_tof_target_pos;
6910 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
6911 &fwk_DevCamDebug_af_search_tof_target_pos, 1);
6912 }
6913 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_next_pos,
6914 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEXT_POS, metadata) {
6915 int32_t fwk_DevCamDebug_af_search_tof_next_pos = *DevCamDebug_af_search_tof_next_pos;
6916 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
6917 &fwk_DevCamDebug_af_search_tof_next_pos, 1);
6918 }
6919 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_near_pos,
6920 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_NEAR_POS, metadata) {
6921 int32_t fwk_DevCamDebug_af_search_tof_near_pos = *DevCamDebug_af_search_tof_near_pos;
6922 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
6923 &fwk_DevCamDebug_af_search_tof_near_pos, 1);
6924 }
6925 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_far_pos,
6926 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_FAR_POS, metadata) {
6927 int32_t fwk_DevCamDebug_af_search_tof_far_pos = *DevCamDebug_af_search_tof_far_pos;
6928 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
6929 &fwk_DevCamDebug_af_search_tof_far_pos, 1);
6930 }
6931 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_tof_confidence,
6932 CAM_INTF_META_DEV_CAM_AF_SEARCH_TOF_CONFIDENCE, metadata) {
6933 int32_t fwk_DevCamDebug_af_search_tof_confidence = *DevCamDebug_af_search_tof_confidence;
6934 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
6935 &fwk_DevCamDebug_af_search_tof_confidence, 1);
6936 }
6937 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_type_select,
6938 CAM_INTF_META_DEV_CAM_AF_SEARCH_TYPE_SELECT, metadata) {
6939 int32_t fwk_DevCamDebug_af_search_type_select = *DevCamDebug_af_search_type_select;
6940 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
6941 &fwk_DevCamDebug_af_search_type_select, 1);
6942 }
6943 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_next_pos,
6944 CAM_INTF_META_DEV_CAM_AF_SEARCH_NEXT_POS, metadata) {
6945 int32_t fwk_DevCamDebug_af_search_next_pos = *DevCamDebug_af_search_next_pos;
6946 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
6947 &fwk_DevCamDebug_af_search_next_pos, 1);
6948 }
6949 IF_META_AVAILABLE(int32_t, DevCamDebug_af_search_target_pos,
6950 CAM_INTF_META_DEV_CAM_AF_SEARCH_TARGET_POS, metadata) {
6951 int32_t fwk_DevCamDebug_af_search_target_pos = *DevCamDebug_af_search_target_pos;
6952 camMetadata.update(DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
6953 &fwk_DevCamDebug_af_search_target_pos, 1);
6954 }
6955 // DevCamDebug metadata translateFromHalMetadata AEC
6956 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_target_luma,
6957 CAM_INTF_META_DEV_CAM_AEC_TARGET_LUMA, metadata) {
6958 int32_t fwk_DevCamDebug_aec_target_luma = *DevCamDebug_aec_target_luma;
6959 camMetadata.update(DEVCAMDEBUG_AEC_TARGET_LUMA, &fwk_DevCamDebug_aec_target_luma, 1);
6960 }
6961 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_comp_luma,
6962 CAM_INTF_META_DEV_CAM_AEC_COMP_LUMA, metadata) {
6963 int32_t fwk_DevCamDebug_aec_comp_luma = *DevCamDebug_aec_comp_luma;
6964 camMetadata.update(DEVCAMDEBUG_AEC_COMP_LUMA, &fwk_DevCamDebug_aec_comp_luma, 1);
6965 }
6966 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_avg_luma,
6967 CAM_INTF_META_DEV_CAM_AEC_AVG_LUMA, metadata) {
6968 int32_t fwk_DevCamDebug_aec_avg_luma = *DevCamDebug_aec_avg_luma;
6969 camMetadata.update(DEVCAMDEBUG_AEC_AVG_LUMA, &fwk_DevCamDebug_aec_avg_luma, 1);
6970 }
6971 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_cur_luma,
6972 CAM_INTF_META_DEV_CAM_AEC_CUR_LUMA, metadata) {
6973 int32_t fwk_DevCamDebug_aec_cur_luma = *DevCamDebug_aec_cur_luma;
6974 camMetadata.update(DEVCAMDEBUG_AEC_CUR_LUMA, &fwk_DevCamDebug_aec_cur_luma, 1);
6975 }
6976 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_linecount,
6977 CAM_INTF_META_DEV_CAM_AEC_LINECOUNT, metadata) {
6978 int32_t fwk_DevCamDebug_aec_linecount = *DevCamDebug_aec_linecount;
6979 camMetadata.update(DEVCAMDEBUG_AEC_LINECOUNT, &fwk_DevCamDebug_aec_linecount, 1);
6980 }
6981 IF_META_AVAILABLE(float, DevCamDebug_aec_real_gain,
6982 CAM_INTF_META_DEV_CAM_AEC_REAL_GAIN, metadata) {
6983 float fwk_DevCamDebug_aec_real_gain = *DevCamDebug_aec_real_gain;
6984 camMetadata.update(DEVCAMDEBUG_AEC_REAL_GAIN, &fwk_DevCamDebug_aec_real_gain, 1);
6985 }
6986 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_exp_index,
6987 CAM_INTF_META_DEV_CAM_AEC_EXP_INDEX, metadata) {
6988 int32_t fwk_DevCamDebug_aec_exp_index = *DevCamDebug_aec_exp_index;
6989 camMetadata.update(DEVCAMDEBUG_AEC_EXP_INDEX, &fwk_DevCamDebug_aec_exp_index, 1);
6990 }
6991 IF_META_AVAILABLE(float, DevCamDebug_aec_lux_idx,
6992 CAM_INTF_META_DEV_CAM_AEC_LUX_IDX, metadata) {
6993 float fwk_DevCamDebug_aec_lux_idx = *DevCamDebug_aec_lux_idx;
6994 camMetadata.update(DEVCAMDEBUG_AEC_LUX_IDX, &fwk_DevCamDebug_aec_lux_idx, 1);
6995 }
Samuel Ha34229982017-02-17 13:51:11 -08006996 // DevCamDebug metadata translateFromHalMetadata zzHDR
6997 IF_META_AVAILABLE(float, DevCamDebug_aec_l_real_gain,
6998 CAM_INTF_META_DEV_CAM_AEC_L_REAL_GAIN, metadata) {
6999 float fwk_DevCamDebug_aec_l_real_gain = *DevCamDebug_aec_l_real_gain;
7000 camMetadata.update(DEVCAMDEBUG_AEC_L_REAL_GAIN, &fwk_DevCamDebug_aec_l_real_gain, 1);
7001 }
7002 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_l_linecount,
7003 CAM_INTF_META_DEV_CAM_AEC_L_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007004 int32_t fwk_DevCamDebug_aec_l_linecount = *DevCamDebug_aec_l_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007005 camMetadata.update(DEVCAMDEBUG_AEC_L_LINECOUNT, &fwk_DevCamDebug_aec_l_linecount, 1);
7006 }
7007 IF_META_AVAILABLE(float, DevCamDebug_aec_s_real_gain,
7008 CAM_INTF_META_DEV_CAM_AEC_S_REAL_GAIN, metadata) {
7009 float fwk_DevCamDebug_aec_s_real_gain = *DevCamDebug_aec_s_real_gain;
7010 camMetadata.update(DEVCAMDEBUG_AEC_S_REAL_GAIN, &fwk_DevCamDebug_aec_s_real_gain, 1);
7011 }
7012 IF_META_AVAILABLE(int32_t, DevCamDebug_aec_s_linecount,
7013 CAM_INTF_META_DEV_CAM_AEC_S_LINECOUNT, metadata) {
Samuel Hae80fbd52017-03-29 16:14:46 -07007014 int32_t fwk_DevCamDebug_aec_s_linecount = *DevCamDebug_aec_s_linecount;
Samuel Ha34229982017-02-17 13:51:11 -08007015 camMetadata.update(DEVCAMDEBUG_AEC_S_LINECOUNT, &fwk_DevCamDebug_aec_s_linecount, 1);
7016 }
7017 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_sensitivity_ratio,
7018 CAM_INTF_META_DEV_CAM_AEC_HDR_SENSITIVITY_RATIO, metadata) {
7019 float fwk_DevCamDebug_aec_hdr_sensitivity_ratio =
7020 *DevCamDebug_aec_hdr_sensitivity_ratio;
7021 camMetadata.update(DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
7022 &fwk_DevCamDebug_aec_hdr_sensitivity_ratio, 1);
7023 }
7024 IF_META_AVAILABLE(float, DevCamDebug_aec_hdr_exp_time_ratio,
7025 CAM_INTF_META_DEV_CAM_AEC_HDR_EXP_TIME_RATIO, metadata) {
7026 float fwk_DevCamDebug_aec_hdr_exp_time_ratio = *DevCamDebug_aec_hdr_exp_time_ratio;
7027 camMetadata.update(DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
7028 &fwk_DevCamDebug_aec_hdr_exp_time_ratio, 1);
7029 }
7030 // DevCamDebug metadata translateFromHalMetadata ADRC
7031 IF_META_AVAILABLE(float, DevCamDebug_aec_total_drc_gain,
7032 CAM_INTF_META_DEV_CAM_AEC_TOTAL_DRC_GAIN, metadata) {
7033 float fwk_DevCamDebug_aec_total_drc_gain = *DevCamDebug_aec_total_drc_gain;
7034 camMetadata.update(DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
7035 &fwk_DevCamDebug_aec_total_drc_gain, 1);
7036 }
7037 IF_META_AVAILABLE(float, DevCamDebug_aec_color_drc_gain,
7038 CAM_INTF_META_DEV_CAM_AEC_COLOR_DRC_GAIN, metadata) {
7039 float fwk_DevCamDebug_aec_color_drc_gain = *DevCamDebug_aec_color_drc_gain;
7040 camMetadata.update(DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
7041 &fwk_DevCamDebug_aec_color_drc_gain, 1);
7042 }
7043 IF_META_AVAILABLE(float, DevCamDebug_aec_gtm_ratio,
7044 CAM_INTF_META_DEV_CAM_AEC_GTM_RATIO, metadata) {
7045 float fwk_DevCamDebug_aec_gtm_ratio = *DevCamDebug_aec_gtm_ratio;
7046 camMetadata.update(DEVCAMDEBUG_AEC_GTM_RATIO, &fwk_DevCamDebug_aec_gtm_ratio, 1);
7047 }
7048 IF_META_AVAILABLE(float, DevCamDebug_aec_ltm_ratio,
7049 CAM_INTF_META_DEV_CAM_AEC_LTM_RATIO, metadata) {
7050 float fwk_DevCamDebug_aec_ltm_ratio = *DevCamDebug_aec_ltm_ratio;
7051 camMetadata.update(DEVCAMDEBUG_AEC_LTM_RATIO, &fwk_DevCamDebug_aec_ltm_ratio, 1);
7052 }
7053 IF_META_AVAILABLE(float, DevCamDebug_aec_la_ratio,
7054 CAM_INTF_META_DEV_CAM_AEC_LA_RATIO, metadata) {
7055 float fwk_DevCamDebug_aec_la_ratio = *DevCamDebug_aec_la_ratio;
7056 camMetadata.update(DEVCAMDEBUG_AEC_LA_RATIO, &fwk_DevCamDebug_aec_la_ratio, 1);
7057 }
7058 IF_META_AVAILABLE(float, DevCamDebug_aec_gamma_ratio,
7059 CAM_INTF_META_DEV_CAM_AEC_GAMMA_RATIO, metadata) {
7060 float fwk_DevCamDebug_aec_gamma_ratio = *DevCamDebug_aec_gamma_ratio;
7061 camMetadata.update(DEVCAMDEBUG_AEC_GAMMA_RATIO, &fwk_DevCamDebug_aec_gamma_ratio, 1);
7062 }
Samuel Habdf4fac2017-07-28 17:21:18 -07007063 // DevCamDebug metadata translateFromHalMetadata AEC MOTION
7064 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dx,
7065 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DX, metadata) {
7066 float fwk_DevCamDebug_aec_camera_motion_dx = *DevCamDebug_aec_camera_motion_dx;
7067 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
7068 &fwk_DevCamDebug_aec_camera_motion_dx, 1);
7069 }
7070 IF_META_AVAILABLE(float, DevCamDebug_aec_camera_motion_dy,
7071 CAM_INTF_META_DEV_CAM_AEC_CAMERA_MOTION_DY, metadata) {
7072 float fwk_DevCamDebug_aec_camera_motion_dy = *DevCamDebug_aec_camera_motion_dy;
7073 camMetadata.update(DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
7074 &fwk_DevCamDebug_aec_camera_motion_dy, 1);
7075 }
7076 IF_META_AVAILABLE(float, DevCamDebug_aec_subject_motion,
7077 CAM_INTF_META_DEV_CAM_AEC_SUBJECT_MOTION, metadata) {
7078 float fwk_DevCamDebug_aec_subject_motion = *DevCamDebug_aec_subject_motion;
7079 camMetadata.update(DEVCAMDEBUG_AEC_SUBJECT_MOTION,
7080 &fwk_DevCamDebug_aec_subject_motion, 1);
7081 }
Samuel Ha68ba5172016-12-15 18:41:12 -08007082 // DevCamDebug metadata translateFromHalMetadata AWB
7083 IF_META_AVAILABLE(float, DevCamDebug_awb_r_gain,
7084 CAM_INTF_META_DEV_CAM_AWB_R_GAIN, metadata) {
7085 float fwk_DevCamDebug_awb_r_gain = *DevCamDebug_awb_r_gain;
7086 camMetadata.update(DEVCAMDEBUG_AWB_R_GAIN, &fwk_DevCamDebug_awb_r_gain, 1);
7087 }
7088 IF_META_AVAILABLE(float, DevCamDebug_awb_g_gain,
7089 CAM_INTF_META_DEV_CAM_AWB_G_GAIN, metadata) {
7090 float fwk_DevCamDebug_awb_g_gain = *DevCamDebug_awb_g_gain;
7091 camMetadata.update(DEVCAMDEBUG_AWB_G_GAIN, &fwk_DevCamDebug_awb_g_gain, 1);
7092 }
7093 IF_META_AVAILABLE(float, DevCamDebug_awb_b_gain,
7094 CAM_INTF_META_DEV_CAM_AWB_B_GAIN, metadata) {
7095 float fwk_DevCamDebug_awb_b_gain = *DevCamDebug_awb_b_gain;
7096 camMetadata.update(DEVCAMDEBUG_AWB_B_GAIN, &fwk_DevCamDebug_awb_b_gain, 1);
7097 }
7098 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_cct,
7099 CAM_INTF_META_DEV_CAM_AWB_CCT, metadata) {
7100 int32_t fwk_DevCamDebug_awb_cct = *DevCamDebug_awb_cct;
7101 camMetadata.update(DEVCAMDEBUG_AWB_CCT, &fwk_DevCamDebug_awb_cct, 1);
7102 }
7103 IF_META_AVAILABLE(int32_t, DevCamDebug_awb_decision,
7104 CAM_INTF_META_DEV_CAM_AWB_DECISION, metadata) {
7105 int32_t fwk_DevCamDebug_awb_decision = *DevCamDebug_awb_decision;
7106 camMetadata.update(DEVCAMDEBUG_AWB_DECISION, &fwk_DevCamDebug_awb_decision, 1);
7107 }
7108 }
7109 // atrace_end(ATRACE_TAG_ALWAYS);
7110
Thierry Strudel3d639192016-09-09 11:52:26 -07007111 IF_META_AVAILABLE(uint32_t, frame_number, CAM_INTF_META_FRAME_NUMBER, metadata) {
7112 int64_t fwk_frame_number = *frame_number;
7113 camMetadata.update(ANDROID_SYNC_FRAME_NUMBER, &fwk_frame_number, 1);
7114 }
7115
7116 IF_META_AVAILABLE(cam_fps_range_t, float_range, CAM_INTF_PARM_FPS_RANGE, metadata) {
7117 int32_t fps_range[2];
7118 fps_range[0] = (int32_t)float_range->min_fps;
7119 fps_range[1] = (int32_t)float_range->max_fps;
7120 camMetadata.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
7121 fps_range, 2);
7122 LOGD("urgent Metadata : ANDROID_CONTROL_AE_TARGET_FPS_RANGE [%d, %d]",
7123 fps_range[0], fps_range[1]);
7124 }
7125
7126 IF_META_AVAILABLE(int32_t, expCompensation, CAM_INTF_PARM_EXPOSURE_COMPENSATION, metadata) {
7127 camMetadata.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION, expCompensation, 1);
7128 }
7129
7130 IF_META_AVAILABLE(uint32_t, sceneMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7131        int val = lookupFwkName(SCENE_MODES_MAP,
7132 METADATA_MAP_SIZE(SCENE_MODES_MAP),
7133 *sceneMode);
7134 if (NAME_NOT_FOUND != val) {
7135 uint8_t fwkSceneMode = (uint8_t)val;
7136 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkSceneMode, 1);
7137 LOGD("urgent Metadata : ANDROID_CONTROL_SCENE_MODE: %d",
7138 fwkSceneMode);
7139 }
7140 }
7141
7142 IF_META_AVAILABLE(uint32_t, ae_lock, CAM_INTF_PARM_AEC_LOCK, metadata) {
7143 uint8_t fwk_ae_lock = (uint8_t) *ae_lock;
7144 camMetadata.update(ANDROID_CONTROL_AE_LOCK, &fwk_ae_lock, 1);
7145 }
7146
7147 IF_META_AVAILABLE(uint32_t, awb_lock, CAM_INTF_PARM_AWB_LOCK, metadata) {
7148 uint8_t fwk_awb_lock = (uint8_t) *awb_lock;
7149 camMetadata.update(ANDROID_CONTROL_AWB_LOCK, &fwk_awb_lock, 1);
7150 }
7151
7152 IF_META_AVAILABLE(uint32_t, color_correct_mode, CAM_INTF_META_COLOR_CORRECT_MODE, metadata) {
7153 uint8_t fwk_color_correct_mode = (uint8_t) *color_correct_mode;
7154 camMetadata.update(ANDROID_COLOR_CORRECTION_MODE, &fwk_color_correct_mode, 1);
7155 }
7156
7157 IF_META_AVAILABLE(cam_edge_application_t, edgeApplication,
7158 CAM_INTF_META_EDGE_MODE, metadata) {
7159 camMetadata.update(ANDROID_EDGE_MODE, &(edgeApplication->edge_mode), 1);
7160 }
7161
7162 IF_META_AVAILABLE(uint32_t, flashPower, CAM_INTF_META_FLASH_POWER, metadata) {
7163 uint8_t fwk_flashPower = (uint8_t) *flashPower;
7164 camMetadata.update(ANDROID_FLASH_FIRING_POWER, &fwk_flashPower, 1);
7165 }
7166
7167 IF_META_AVAILABLE(int64_t, flashFiringTime, CAM_INTF_META_FLASH_FIRING_TIME, metadata) {
7168 camMetadata.update(ANDROID_FLASH_FIRING_TIME, flashFiringTime, 1);
7169 }
7170
7171 IF_META_AVAILABLE(int32_t, flashState, CAM_INTF_META_FLASH_STATE, metadata) {
7172 if (0 <= *flashState) {
7173 uint8_t fwk_flashState = (uint8_t) *flashState;
7174 if (!gCamCapability[mCameraId]->flash_available) {
7175 fwk_flashState = ANDROID_FLASH_STATE_UNAVAILABLE;
7176 }
7177 camMetadata.update(ANDROID_FLASH_STATE, &fwk_flashState, 1);
7178 }
7179 }
7180
7181 IF_META_AVAILABLE(uint32_t, flashMode, CAM_INTF_META_FLASH_MODE, metadata) {
7182 int val = lookupFwkName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP), *flashMode);
7183 if (NAME_NOT_FOUND != val) {
7184 uint8_t fwk_flashMode = (uint8_t)val;
7185 camMetadata.update(ANDROID_FLASH_MODE, &fwk_flashMode, 1);
7186 }
7187 }
7188
7189 IF_META_AVAILABLE(uint32_t, hotPixelMode, CAM_INTF_META_HOTPIXEL_MODE, metadata) {
7190 uint8_t fwk_hotPixelMode = (uint8_t) *hotPixelMode;
7191 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &fwk_hotPixelMode, 1);
7192 }
7193
7194 IF_META_AVAILABLE(float, lensAperture, CAM_INTF_META_LENS_APERTURE, metadata) {
7195 camMetadata.update(ANDROID_LENS_APERTURE , lensAperture, 1);
7196 }
7197
7198 IF_META_AVAILABLE(float, filterDensity, CAM_INTF_META_LENS_FILTERDENSITY, metadata) {
7199 camMetadata.update(ANDROID_LENS_FILTER_DENSITY , filterDensity, 1);
7200 }
7201
7202 IF_META_AVAILABLE(float, focalLength, CAM_INTF_META_LENS_FOCAL_LENGTH, metadata) {
7203 camMetadata.update(ANDROID_LENS_FOCAL_LENGTH, focalLength, 1);
7204 }
7205
7206 IF_META_AVAILABLE(uint32_t, opticalStab, CAM_INTF_META_LENS_OPT_STAB_MODE, metadata) {
7207 uint8_t fwk_opticalStab = (uint8_t) *opticalStab;
7208 camMetadata.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &fwk_opticalStab, 1);
7209 }
7210
7211 IF_META_AVAILABLE(uint32_t, videoStab, CAM_INTF_META_VIDEO_STAB_MODE, metadata) {
7212 uint8_t fwk_videoStab = (uint8_t) *videoStab;
7213 LOGD("fwk_videoStab = %d", fwk_videoStab);
7214 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwk_videoStab, 1);
7215 } else {
7216        // Regardless of whether video stabilization is supported or not, CTS expects the
7217        // EIS result to be non-NULL, so hardcode the video stabilization result to OFF mode.
7218 uint8_t fwkVideoStabMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
7219 camMetadata.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &fwkVideoStabMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007220 LOGD("EIS result default to OFF mode");
Thierry Strudel3d639192016-09-09 11:52:26 -07007221 }
7222
7223 IF_META_AVAILABLE(uint32_t, noiseRedMode, CAM_INTF_META_NOISE_REDUCTION_MODE, metadata) {
7224 uint8_t fwk_noiseRedMode = (uint8_t) *noiseRedMode;
7225 camMetadata.update(ANDROID_NOISE_REDUCTION_MODE, &fwk_noiseRedMode, 1);
7226 }
7227
7228 IF_META_AVAILABLE(float, effectiveExposureFactor, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR, metadata) {
7229 camMetadata.update(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR, effectiveExposureFactor, 1);
7230 }
7231
Thierry Strudel3d639192016-09-09 11:52:26 -07007232 IF_META_AVAILABLE(cam_black_level_metadata_t, blackLevelAppliedPattern,
7233 CAM_INTF_META_BLACK_LEVEL_APPLIED_PATTERN, metadata) {
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007234 float fwk_blackLevelInd[BLACK_LEVEL_PATTERN_CNT];
Thierry Strudel3d639192016-09-09 11:52:26 -07007235
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007236 adjustBlackLevelForCFA(blackLevelAppliedPattern->cam_black_level, fwk_blackLevelInd,
7237 gCamCapability[mCameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07007238
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007239 LOGD("applied dynamic black level in RGGB order = %f %f %f %f",
Thierry Strudel3d639192016-09-09 11:52:26 -07007240 blackLevelAppliedPattern->cam_black_level[0],
7241 blackLevelAppliedPattern->cam_black_level[1],
7242 blackLevelAppliedPattern->cam_black_level[2],
7243 blackLevelAppliedPattern->cam_black_level[3]);
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007244 camMetadata.update(QCAMERA3_SENSOR_DYNAMIC_BLACK_LEVEL_PATTERN, fwk_blackLevelInd,
7245 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007246
7247#ifndef USE_HAL_3_3
7248 // Update the ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL
Jason Lee4f3d96e2017-02-28 19:24:14 +05307249 // Need to convert from the internal 14-bit depth to the sensor's 10-bit raw
Zhijun Heb753c672016-06-15 14:50:48 -07007250 // depth space, i.e. divide by 2^(14-10) = 16 (see the sketch below this block).
Jason Lee4f3d96e2017-02-28 19:24:14 +05307251 fwk_blackLevelInd[0] /= 16.0;
7252 fwk_blackLevelInd[1] /= 16.0;
7253 fwk_blackLevelInd[2] /= 16.0;
7254 fwk_blackLevelInd[3] /= 16.0;
Shuzhen Wanga5da1022016-07-13 20:18:42 -07007255 camMetadata.update(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL, fwk_blackLevelInd,
7256 BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007257#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007258 }
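    /* Illustrative sketch (kept as a comment so it does not affect this
     * function): a value expressed with B bits is rescaled to b bits by
     * dividing by 2^(B - b); for the dynamic black level above that is
     * 2^(14 - 10) = 16. A hypothetical helper would be:
     *
     *   // Assumes fromBits >= toBits.
     *   float rescaleBitDepth(float value, int fromBits, int toBits)
     *   {
     *       return value / (float)(1 << (fromBits - toBits));
     *   }
     *
     * e.g. a black level of 1024 in the 14-bit domain maps to 64 in the
     * 10-bit sensor raw domain. */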
7259
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007260#ifndef USE_HAL_3_3
7261 // Fixed whitelevel is used by ISP/Sensor
7262 camMetadata.update(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL,
7263 &gCamCapability[mCameraId]->white_level, 1);
7264#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07007265
7266 IF_META_AVAILABLE(cam_crop_region_t, hScalerCropRegion,
7267 CAM_INTF_META_SCALER_CROP_REGION, metadata) {
7268 int32_t scalerCropRegion[4];
7269 scalerCropRegion[0] = hScalerCropRegion->left;
7270 scalerCropRegion[1] = hScalerCropRegion->top;
7271 scalerCropRegion[2] = hScalerCropRegion->width;
7272 scalerCropRegion[3] = hScalerCropRegion->height;
7273
7274 // Adjust crop region from sensor output coordinate system to active
7275 // array coordinate system.
7276 mCropRegionMapper.toActiveArray(scalerCropRegion[0], scalerCropRegion[1],
7277 scalerCropRegion[2], scalerCropRegion[3]);
7278
7279 camMetadata.update(ANDROID_SCALER_CROP_REGION, scalerCropRegion, 4);
7280 }
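    /* Illustrative sketch (as a comment, since the real remapping lives in
     * mCropRegionMapper and is not shown here): one simple way to move a
     * rectangle from the sensor output coordinate space to the active pixel
     * array space is a linear scale of both axes; a production mapper would
     * also clamp to the active array bounds. Hypothetical helper:
     *
     *   void toActiveArraySketch(int32_t rect[4],
     *           int32_t sensorW, int32_t sensorH,
     *           int32_t activeW, int32_t activeH)
     *   {
     *       rect[0] = rect[0] * activeW / sensorW;  // left
     *       rect[1] = rect[1] * activeH / sensorH;  // top
     *       rect[2] = rect[2] * activeW / sensorW;  // width
     *       rect[3] = rect[3] * activeH / sensorH;  // height
     *   }
     */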
7281
7282 IF_META_AVAILABLE(int64_t, sensorExpTime, CAM_INTF_META_SENSOR_EXPOSURE_TIME, metadata) {
7283 LOGD("sensorExpTime = %lld", *sensorExpTime);
7284 camMetadata.update(ANDROID_SENSOR_EXPOSURE_TIME , sensorExpTime, 1);
7285 }
7286
Shuzhen Wang6a1dd612017-08-05 15:03:53 -07007287 IF_META_AVAILABLE(float, expTimeBoost, CAM_INTF_META_EXP_TIME_BOOST, metadata) {
7288 LOGD("expTimeBoost = %f", *expTimeBoost);
7289 camMetadata.update(NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST, expTimeBoost, 1);
7290 }
7291
Thierry Strudel3d639192016-09-09 11:52:26 -07007292 IF_META_AVAILABLE(int64_t, sensorFrameDuration,
7293 CAM_INTF_META_SENSOR_FRAME_DURATION, metadata) {
7294 LOGD("sensorFrameDuration = %lld", *sensorFrameDuration);
7295 camMetadata.update(ANDROID_SENSOR_FRAME_DURATION, sensorFrameDuration, 1);
7296 }
7297
7298 IF_META_AVAILABLE(int64_t, sensorRollingShutterSkew,
7299 CAM_INTF_META_SENSOR_ROLLING_SHUTTER_SKEW, metadata) {
7300 LOGD("sensorRollingShutterSkew = %lld", *sensorRollingShutterSkew);
7301 camMetadata.update(ANDROID_SENSOR_ROLLING_SHUTTER_SKEW,
7302 sensorRollingShutterSkew, 1);
7303 }
7304
7305 IF_META_AVAILABLE(int32_t, sensorSensitivity, CAM_INTF_META_SENSOR_SENSITIVITY, metadata) {
7306 LOGD("sensorSensitivity = %d", *sensorSensitivity);
7307 camMetadata.update(ANDROID_SENSOR_SENSITIVITY, sensorSensitivity, 1);
7308
7309 //calculate the noise profile based on sensitivity
7310 double noise_profile_S = computeNoiseModelEntryS(*sensorSensitivity);
7311 double noise_profile_O = computeNoiseModelEntryO(*sensorSensitivity);
7312 double noise_profile[2 * gCamCapability[mCameraId]->num_color_channels];
7313 for (int i = 0; i < 2 * gCamCapability[mCameraId]->num_color_channels; i += 2) {
7314 noise_profile[i] = noise_profile_S;
7315 noise_profile[i+1] = noise_profile_O;
7316 }
7317 LOGD("noise model entry (S, O) is (%f, %f)",
7318 noise_profile_S, noise_profile_O);
7319 camMetadata.update(ANDROID_SENSOR_NOISE_PROFILE, noise_profile,
7320 (size_t) (2 * gCamCapability[mCameraId]->num_color_channels));
7321 }
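    /* Illustrative note (as a comment): ANDROID_SENSOR_NOISE_PROFILE carries one
     * (S, O) pair per color channel, interleaved as [S0, O0, S1, O1, ...], where
     * the noise variance of a pixel with normalized signal level x is modeled as
     * S * x + O (S scales the signal-dependent shot-noise term, O is the
     * signal-independent read-noise floor). The loop above fills every channel
     * with the same pair derived from the current sensitivity; a hypothetical
     * packing helper would be:
     *
     *   void packNoiseProfile(double S, double O, size_t channels, double *out)
     *   {
     *       for (size_t ch = 0; ch < channels; ch++) {
     *           out[2 * ch]     = S;  // slope
     *           out[2 * ch + 1] = O;  // offset
     *       }
     *   }
     */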
7322
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007323#ifndef USE_HAL_3_3
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007324 int32_t fwk_ispSensitivity = 100;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007325 IF_META_AVAILABLE(int32_t, ispSensitivity, CAM_INTF_META_ISP_SENSITIVITY, metadata) {
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007326 fwk_ispSensitivity = (int32_t) *ispSensitivity;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007327 }
Shuzhen Wang6cf631c2016-06-03 15:06:16 -07007328 IF_META_AVAILABLE(float, postStatsSensitivity, CAM_INTF_META_ISP_POST_STATS_SENSITIVITY, metadata) {
7329 fwk_ispSensitivity = (int32_t) (*postStatsSensitivity * fwk_ispSensitivity);
7330 }
7331 camMetadata.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &fwk_ispSensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007332#endif
7333
Thierry Strudel3d639192016-09-09 11:52:26 -07007334 IF_META_AVAILABLE(uint32_t, shadingMode, CAM_INTF_META_SHADING_MODE, metadata) {
7335 uint8_t fwk_shadingMode = (uint8_t) *shadingMode;
7336 camMetadata.update(ANDROID_SHADING_MODE, &fwk_shadingMode, 1);
7337 }
7338
7339 IF_META_AVAILABLE(uint32_t, faceDetectMode, CAM_INTF_META_STATS_FACEDETECT_MODE, metadata) {
7340 int val = lookupFwkName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
7341 *faceDetectMode);
7342 if (NAME_NOT_FOUND != val) {
7343 uint8_t fwk_faceDetectMode = (uint8_t)val;
7344 camMetadata.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &fwk_faceDetectMode, 1);
7345
7346 if (fwk_faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
7347 IF_META_AVAILABLE(cam_face_detection_data_t, faceDetectionInfo,
7348 CAM_INTF_META_FACE_DETECTION, metadata) {
7349 uint8_t numFaces = MIN(
7350 faceDetectionInfo->num_faces_detected, MAX_ROI);
7351 int32_t faceIds[MAX_ROI];
7352 uint8_t faceScores[MAX_ROI];
7353 int32_t faceRectangles[MAX_ROI * 4];
7354 int32_t faceLandmarks[MAX_ROI * 6];
7355 size_t j = 0, k = 0;
7356
7357 for (size_t i = 0; i < numFaces; i++) {
7358 faceScores[i] = (uint8_t)faceDetectionInfo->faces[i].score;
7359                        // Adjust the face rectangle from the sensor output coordinate
7360                        // system to the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007361 cam_rect_t rect = faceDetectionInfo->faces[i].face_boundary;
Thierry Strudel3d639192016-09-09 11:52:26 -07007362 mCropRegionMapper.toActiveArray(rect.left, rect.top,
7363 rect.width, rect.height);
7364
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007365 convertToRegions(rect, faceRectangles+j, -1);
Thierry Strudel3d639192016-09-09 11:52:26 -07007366
Jason Lee8ce36fa2017-04-19 19:40:37 -07007367 LOGL("FD_DEBUG : Frame[%d] Face[%d] : top-left (%d, %d), "
7368 "bottom-right (%d, %d)",
7369 faceDetectionInfo->frame_id, i,
7370 faceRectangles[j + FACE_LEFT], faceRectangles[j + FACE_TOP],
7371 faceRectangles[j + FACE_RIGHT], faceRectangles[j + FACE_BOTTOM]);
7372
Thierry Strudel3d639192016-09-09 11:52:26 -07007373 j+= 4;
7374 }
7375 if (numFaces <= 0) {
7376 memset(faceIds, 0, sizeof(int32_t) * MAX_ROI);
7377 memset(faceScores, 0, sizeof(uint8_t) * MAX_ROI);
7378 memset(faceRectangles, 0, sizeof(int32_t) * MAX_ROI * 4);
7379 memset(faceLandmarks, 0, sizeof(int32_t) * MAX_ROI * 6);
7380 }
7381
7382 camMetadata.update(ANDROID_STATISTICS_FACE_SCORES, faceScores,
7383 numFaces);
7384 camMetadata.update(ANDROID_STATISTICS_FACE_RECTANGLES,
7385 faceRectangles, numFaces * 4U);
7386 if (fwk_faceDetectMode ==
7387 ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
7388 IF_META_AVAILABLE(cam_face_landmarks_data_t, landmarks,
7389 CAM_INTF_META_FACE_LANDMARK, metadata) {
7390
7391 for (size_t i = 0; i < numFaces; i++) {
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007392 cam_face_landmarks_info_t face_landmarks = landmarks->face_landmarks[i];
Thierry Strudel3d639192016-09-09 11:52:26 -07007393 // Map the landmark coordinates from the sensor output coordinate
7394 // system to the active array coordinate system.
7395 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007396 face_landmarks.left_eye_center.x,
7397 face_landmarks.left_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007398 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007399 face_landmarks.right_eye_center.x,
7400 face_landmarks.right_eye_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007401 mCropRegionMapper.toActiveArray(
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007402 face_landmarks.mouth_center.x,
7403 face_landmarks.mouth_center.y);
Thierry Strudel3d639192016-09-09 11:52:26 -07007404
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007405 convertLandmarks(face_landmarks, faceLandmarks+k);
Jason Lee8ce36fa2017-04-19 19:40:37 -07007406
7407 LOGL("FD_DEBUG LANDMARK : Frame[%d] Face[%d] : "
7408 "left-eye (%d, %d), right-eye (%d, %d), mouth (%d, %d)",
7409 faceDetectionInfo->frame_id, i,
7410 faceLandmarks[k + LEFT_EYE_X],
7411 faceLandmarks[k + LEFT_EYE_Y],
7412 faceLandmarks[k + RIGHT_EYE_X],
7413 faceLandmarks[k + RIGHT_EYE_Y],
7414 faceLandmarks[k + MOUTH_X],
7415 faceLandmarks[k + MOUTH_Y]);
7416
Thierry Strudel04e026f2016-10-10 11:27:36 -07007417 k+= TOTAL_LANDMARK_INDICES;
7418 }
7419 } else {
7420 for (size_t i = 0; i < numFaces; i++) {
7421 setInvalidLandmarks(faceLandmarks+k);
7422 k+= TOTAL_LANDMARK_INDICES;
Thierry Strudel3d639192016-09-09 11:52:26 -07007423 }
7424 }
7425
Jason Lee49619db2017-04-13 12:07:22 -07007426 for (size_t i = 0; i < numFaces; i++) {
7427 faceIds[i] = faceDetectionInfo->faces[i].face_id;
7428
7429 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : faceIds=%d",
7430 faceDetectionInfo->frame_id, i, faceIds[i]);
7431 }
7432
Thierry Strudel3d639192016-09-09 11:52:26 -07007433 camMetadata.update(ANDROID_STATISTICS_FACE_IDS, faceIds, numFaces);
7434 camMetadata.update(ANDROID_STATISTICS_FACE_LANDMARKS,
7435 faceLandmarks, numFaces * 6U);
Jason Lee49619db2017-04-13 12:07:22 -07007436 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007437 IF_META_AVAILABLE(cam_face_blink_data_t, blinks,
7438 CAM_INTF_META_FACE_BLINK, metadata) {
7439 uint8_t detected[MAX_ROI];
7440 uint8_t degree[MAX_ROI * 2];
7441 for (size_t i = 0; i < numFaces; i++) {
7442 detected[i] = blinks->blink[i].blink_detected;
7443 degree[2 * i] = blinks->blink[i].left_blink;
7444 degree[2 * i + 1] = blinks->blink[i].right_blink;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007445
Jason Lee49619db2017-04-13 12:07:22 -07007446 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7447 "blink_detected=%d, leye_blink=%d, reye_blink=%d",
7448 faceDetectionInfo->frame_id, i, detected[i], degree[2 * i],
7449 degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007450 }
7451 camMetadata.update(QCAMERA3_STATS_BLINK_DETECTED,
7452 detected, numFaces);
7453 camMetadata.update(QCAMERA3_STATS_BLINK_DEGREE,
7454 degree, numFaces * 2);
7455 }
7456 IF_META_AVAILABLE(cam_face_smile_data_t, smiles,
7457 CAM_INTF_META_FACE_SMILE, metadata) {
7458 uint8_t degree[MAX_ROI];
7459 uint8_t confidence[MAX_ROI];
7460 for (size_t i = 0; i < numFaces; i++) {
7461 degree[i] = smiles->smile[i].smile_degree;
7462 confidence[i] = smiles->smile[i].smile_confidence;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007463
Jason Lee49619db2017-04-13 12:07:22 -07007464 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : "
7465 "smile_degree=%d, smile_score=%d",
7466 faceDetectionInfo->frame_id, i, degree[i], confidence[i]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007467 }
7468 camMetadata.update(QCAMERA3_STATS_SMILE_DEGREE,
7469 degree, numFaces);
7470 camMetadata.update(QCAMERA3_STATS_SMILE_CONFIDENCE,
7471 confidence, numFaces);
7472 }
7473 IF_META_AVAILABLE(cam_face_gaze_data_t, gazes,
7474 CAM_INTF_META_FACE_GAZE, metadata) {
7475 int8_t angle[MAX_ROI];
7476 int32_t direction[MAX_ROI * 3];
7477 int8_t degree[MAX_ROI * 2];
7478 for (size_t i = 0; i < numFaces; i++) {
7479 angle[i] = gazes->gaze[i].gaze_angle;
7480 direction[3 * i] = gazes->gaze[i].updown_dir;
7481 direction[3 * i + 1] = gazes->gaze[i].leftright_dir;
7482 direction[3 * i + 2] = gazes->gaze[i].roll_dir;
7483 degree[2 * i] = gazes->gaze[i].left_right_gaze;
7484 degree[2 * i + 1] = gazes->gaze[i].top_bottom_gaze;
Jason Lee8ce36fa2017-04-19 19:40:37 -07007485
7486 LOGL("FD_DEBUG LANDMARK : Frame[%d] : Face[%d] : gaze_angle=%d, "
7487 "updown_dir=%d, leftright_dir=%d,, roll_dir=%d, "
7488 "left_right_gaze=%d, top_bottom_gaze=%d",
7489 faceDetectionInfo->frame_id, i, angle[i],
7490 direction[3 * i], direction[3 * i + 1],
7491 direction[3 * i + 2],
7492 degree[2 * i], degree[2 * i + 1]);
Thierry Strudel54dc9782017-02-15 12:12:10 -08007493 }
7494 camMetadata.update(QCAMERA3_STATS_GAZE_ANGLE,
7495 (uint8_t *)angle, numFaces);
7496 camMetadata.update(QCAMERA3_STATS_GAZE_DIRECTION,
7497 direction, numFaces * 3);
7498 camMetadata.update(QCAMERA3_STATS_GAZE_DEGREE,
7499 (uint8_t *)degree, numFaces * 2);
7500 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007501 }
7502 }
7503 }
7504 }
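    /* Illustrative sketch (as a comment; the HAL's own conversion is done by
     * convertToRegions()): the framework expects each face rectangle as four
     * int32_t values [left, top, right, bottom], while the backend reports
     * {left, top, width, height}, so the conversion is essentially:
     *
     *   out[0] = rect.left;                 // FACE_LEFT
     *   out[1] = rect.top;                  // FACE_TOP
     *   out[2] = rect.left + rect.width;    // FACE_RIGHT
     *   out[3] = rect.top  + rect.height;   // FACE_BOTTOM
     *
     * (whether the right/bottom edge is inclusive is a convention handled by
     * the real helper). */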
7505
7506 IF_META_AVAILABLE(uint32_t, histogramMode, CAM_INTF_META_STATS_HISTOGRAM_MODE, metadata) {
7507 uint8_t fwk_histogramMode = (uint8_t) *histogramMode;
Shuzhen Wang14415f52016-11-16 18:26:18 -08007508 int32_t histogramBins = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007509 camMetadata.update(QCAMERA3_HISTOGRAM_MODE, &fwk_histogramMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -08007510 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &fwk_histogramMode, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007511
Shuzhen Wang14415f52016-11-16 18:26:18 -08007512 IF_META_AVAILABLE(int32_t, histBins, CAM_INTF_META_STATS_HISTOGRAM_BINS, metadata) {
7513 histogramBins = *histBins;
7514 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS, &histogramBins, 1);
7515 }
7516
7517 if (fwk_histogramMode == QCAMERA3_HISTOGRAM_MODE_ON && histogramBins > 0) {
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007518 IF_META_AVAILABLE(cam_hist_stats_t, stats_data, CAM_INTF_META_HISTOGRAM, metadata) {
7519 // process histogram statistics info
Shuzhen Wang14415f52016-11-16 18:26:18 -08007520 int32_t* histogramData = NULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007521
7522 switch (stats_data->type) {
7523 case CAM_HISTOGRAM_TYPE_BAYER:
7524 switch (stats_data->bayer_stats.data_type) {
7525 case CAM_STATS_CHANNEL_GR:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007526 histogramData = (int32_t *)stats_data->bayer_stats.gr_stats.hist_buf;
7527 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007528 case CAM_STATS_CHANNEL_GB:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007529 histogramData = (int32_t *)stats_data->bayer_stats.gb_stats.hist_buf;
7530 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007531 case CAM_STATS_CHANNEL_B:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007532 histogramData = (int32_t *)stats_data->bayer_stats.b_stats.hist_buf;
7533 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007534 case CAM_STATS_CHANNEL_Y:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007535 case CAM_STATS_CHANNEL_ALL:
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007536 case CAM_STATS_CHANNEL_R:
7537 default:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007538 histogramData = (int32_t *)stats_data->bayer_stats.r_stats.hist_buf;
7539 break;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007540 }
7541 break;
7542 case CAM_HISTOGRAM_TYPE_YUV:
Shuzhen Wang14415f52016-11-16 18:26:18 -08007543 histogramData = (int32_t *)stats_data->yuv_stats.hist_buf;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007544 break;
7545 }
7546
Shuzhen Wang14415f52016-11-16 18:26:18 -08007547 camMetadata.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM, histogramData, histogramBins);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07007548 }
7549 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007550 }
7551
7552 IF_META_AVAILABLE(uint32_t, sharpnessMapMode,
7553 CAM_INTF_META_STATS_SHARPNESS_MAP_MODE, metadata) {
7554 uint8_t fwk_sharpnessMapMode = (uint8_t) *sharpnessMapMode;
7555 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &fwk_sharpnessMapMode, 1);
7556 }
7557
7558 IF_META_AVAILABLE(cam_sharpness_map_t, sharpnessMap,
7559 CAM_INTF_META_STATS_SHARPNESS_MAP, metadata) {
7560 camMetadata.update(ANDROID_STATISTICS_SHARPNESS_MAP, (int32_t *)sharpnessMap->sharpness,
7561 CAM_MAX_MAP_WIDTH * CAM_MAX_MAP_HEIGHT * 3);
7562 }
7563
7564 IF_META_AVAILABLE(cam_lens_shading_map_t, lensShadingMap,
7565 CAM_INTF_META_LENS_SHADING_MAP, metadata) {
7566 size_t map_height = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.height,
7567 CAM_MAX_SHADING_MAP_HEIGHT);
7568 size_t map_width = MIN((size_t)gCamCapability[mCameraId]->lens_shading_map_size.width,
7569 CAM_MAX_SHADING_MAP_WIDTH);
7570 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP,
7571 lensShadingMap->lens_shading, 4U * map_width * map_height);
7572 }
7573
7574 IF_META_AVAILABLE(uint32_t, toneMapMode, CAM_INTF_META_TONEMAP_MODE, metadata) {
7575 uint8_t fwk_toneMapMode = (uint8_t) *toneMapMode;
7576 camMetadata.update(ANDROID_TONEMAP_MODE, &fwk_toneMapMode, 1);
7577 }
7578
7579 IF_META_AVAILABLE(cam_rgb_tonemap_curves, tonemap, CAM_INTF_META_TONEMAP_CURVES, metadata) {
7580 //Populate CAM_INTF_META_TONEMAP_CURVES
7581 /* ch0 = G, ch 1 = B, ch 2 = R*/
7582 if (tonemap->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7583 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7584 tonemap->tonemap_points_cnt,
7585 CAM_MAX_TONEMAP_CURVE_SIZE);
7586 tonemap->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7587 }
7588
7589 camMetadata.update(ANDROID_TONEMAP_CURVE_GREEN,
7590 &tonemap->curves[0].tonemap_points[0][0],
7591 tonemap->tonemap_points_cnt * 2);
7592
7593 camMetadata.update(ANDROID_TONEMAP_CURVE_BLUE,
7594 &tonemap->curves[1].tonemap_points[0][0],
7595 tonemap->tonemap_points_cnt * 2);
7596
7597 camMetadata.update(ANDROID_TONEMAP_CURVE_RED,
7598 &tonemap->curves[2].tonemap_points[0][0],
7599 tonemap->tonemap_points_cnt * 2);
7600 }
7601
7602 IF_META_AVAILABLE(cam_color_correct_gains_t, colorCorrectionGains,
7603 CAM_INTF_META_COLOR_CORRECT_GAINS, metadata) {
7604 camMetadata.update(ANDROID_COLOR_CORRECTION_GAINS, colorCorrectionGains->gains,
7605 CC_GAIN_MAX);
7606 }
7607
7608 IF_META_AVAILABLE(cam_color_correct_matrix_t, colorCorrectionMatrix,
7609 CAM_INTF_META_COLOR_CORRECT_TRANSFORM, metadata) {
7610 camMetadata.update(ANDROID_COLOR_CORRECTION_TRANSFORM,
7611 (camera_metadata_rational_t *)(void *)colorCorrectionMatrix->transform_matrix,
7612 CC_MATRIX_COLS * CC_MATRIX_ROWS);
7613 }
7614
7615 IF_META_AVAILABLE(cam_profile_tone_curve, toneCurve,
7616 CAM_INTF_META_PROFILE_TONE_CURVE, metadata) {
7617 if (toneCurve->tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
7618 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
7619 toneCurve->tonemap_points_cnt,
7620 CAM_MAX_TONEMAP_CURVE_SIZE);
7621 toneCurve->tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
7622 }
7623 camMetadata.update(ANDROID_SENSOR_PROFILE_TONE_CURVE,
7624 (float*)toneCurve->curve.tonemap_points,
7625 toneCurve->tonemap_points_cnt * 2);
7626 }
7627
7628 IF_META_AVAILABLE(cam_color_correct_gains_t, predColorCorrectionGains,
7629 CAM_INTF_META_PRED_COLOR_CORRECT_GAINS, metadata) {
7630 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_GAINS,
7631 predColorCorrectionGains->gains, 4);
7632 }
7633
7634 IF_META_AVAILABLE(cam_color_correct_matrix_t, predColorCorrectionMatrix,
7635 CAM_INTF_META_PRED_COLOR_CORRECT_TRANSFORM, metadata) {
7636 camMetadata.update(ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
7637 (camera_metadata_rational_t *)(void *)predColorCorrectionMatrix->transform_matrix,
7638 CC_MATRIX_ROWS * CC_MATRIX_COLS);
7639 }
7640
7641 IF_META_AVAILABLE(float, otpWbGrGb, CAM_INTF_META_OTP_WB_GRGB, metadata) {
7642 camMetadata.update(ANDROID_SENSOR_GREEN_SPLIT, otpWbGrGb, 1);
7643 }
7644
7645 IF_META_AVAILABLE(uint32_t, blackLevelLock, CAM_INTF_META_BLACK_LEVEL_LOCK, metadata) {
7646 uint8_t fwk_blackLevelLock = (uint8_t) *blackLevelLock;
7647 camMetadata.update(ANDROID_BLACK_LEVEL_LOCK, &fwk_blackLevelLock, 1);
7648 }
7649
7650 IF_META_AVAILABLE(uint32_t, sceneFlicker, CAM_INTF_META_SCENE_FLICKER, metadata) {
7651 uint8_t fwk_sceneFlicker = (uint8_t) *sceneFlicker;
7652 camMetadata.update(ANDROID_STATISTICS_SCENE_FLICKER, &fwk_sceneFlicker, 1);
7653 }
7654
7655 IF_META_AVAILABLE(uint32_t, effectMode, CAM_INTF_PARM_EFFECT, metadata) {
7656 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
7657 *effectMode);
7658 if (NAME_NOT_FOUND != val) {
7659 uint8_t fwk_effectMode = (uint8_t)val;
7660 camMetadata.update(ANDROID_CONTROL_EFFECT_MODE, &fwk_effectMode, 1);
7661 }
7662 }
7663
7664 IF_META_AVAILABLE(cam_test_pattern_data_t, testPatternData,
7665 CAM_INTF_META_TEST_PATTERN_DATA, metadata) {
7666 int32_t fwk_testPatternMode = lookupFwkName(TEST_PATTERN_MAP,
7667 METADATA_MAP_SIZE(TEST_PATTERN_MAP), testPatternData->mode);
7668 if (NAME_NOT_FOUND != fwk_testPatternMode) {
7669 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &fwk_testPatternMode, 1);
7670 }
7671 int32_t fwk_testPatternData[4];
7672 fwk_testPatternData[0] = testPatternData->r;
7673 fwk_testPatternData[3] = testPatternData->b;
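        // Per the Android metadata definition, ANDROID_SENSOR_TEST_PATTERN_DATA is
        // ordered [R, Geven, Godd, B]; which of the HAL's gr/gb values lands in the
        // even/odd green slot depends on the sensor's color filter arrangement.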
7674 switch (gCamCapability[mCameraId]->color_arrangement) {
7675 case CAM_FILTER_ARRANGEMENT_RGGB:
7676 case CAM_FILTER_ARRANGEMENT_GRBG:
7677 fwk_testPatternData[1] = testPatternData->gr;
7678 fwk_testPatternData[2] = testPatternData->gb;
7679 break;
7680 case CAM_FILTER_ARRANGEMENT_GBRG:
7681 case CAM_FILTER_ARRANGEMENT_BGGR:
7682 fwk_testPatternData[2] = testPatternData->gr;
7683 fwk_testPatternData[1] = testPatternData->gb;
7684 break;
7685 default:
7686 LOGE("color arrangement %d is not supported",
7687 gCamCapability[mCameraId]->color_arrangement);
7688 break;
7689 }
7690 camMetadata.update(ANDROID_SENSOR_TEST_PATTERN_DATA, fwk_testPatternData, 4);
7691 }
7692
7693 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, metadata) {
7694 camMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
7695 }
7696
7697 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, metadata) {
7698 String8 str((const char *)gps_methods);
7699 camMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
7700 }
7701
7702 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, metadata) {
7703 camMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
7704 }
7705
7706 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, metadata) {
7707 camMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
7708 }
7709
7710 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, metadata) {
7711 uint8_t fwk_jpeg_quality = (uint8_t) *jpeg_quality;
7712 camMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
7713 }
7714
7715 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, metadata) {
7716 uint8_t fwk_thumb_quality = (uint8_t) *thumb_quality;
7717 camMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
7718 }
7719
7720 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, metadata) {
7721 int32_t fwk_thumb_size[2];
7722 fwk_thumb_size[0] = thumb_size->width;
7723 fwk_thumb_size[1] = thumb_size->height;
7724 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
7725 }
7726
Shuzhen Wang2fea89e2017-05-08 17:02:15 -07007727 // Skip reprocess metadata if there is no input stream.
7728 if (mInputStreamInfo.dim.width > 0 && mInputStreamInfo.dim.height > 0) {
7729 IF_META_AVAILABLE(int32_t, privateData, CAM_INTF_META_PRIVATE_DATA, metadata) {
7730 camMetadata.update(QCAMERA3_PRIVATEDATA_REPROCESS,
7731 privateData,
7732 MAX_METADATA_PRIVATE_PAYLOAD_SIZE_IN_BYTES / sizeof(int32_t));
7733 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007734 }
7735
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007736 IF_META_AVAILABLE(int32_t, meteringMode, CAM_INTF_PARM_AEC_ALGO_TYPE, metadata) {
Thierry Strudel54dc9782017-02-15 12:12:10 -08007737 camMetadata.update(QCAMERA3_EXPOSURE_METER,
Thierry Strudel295a0ca2016-11-03 18:38:47 -07007738 meteringMode, 1);
7739 }
7740
Thierry Strudel54dc9782017-02-15 12:12:10 -08007741 IF_META_AVAILABLE(cam_asd_hdr_scene_data_t, hdr_scene_data,
7742 CAM_INTF_META_ASD_HDR_SCENE_DATA, metadata) {
7743 LOGD("hdr_scene_data: %d %f\n",
7744 hdr_scene_data->is_hdr_scene, hdr_scene_data->hdr_confidence);
7745 uint8_t isHdr = hdr_scene_data->is_hdr_scene;
7746 float isHdrConfidence = hdr_scene_data->hdr_confidence;
7747 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE,
7748 &isHdr, 1);
7749 camMetadata.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE,
7750 &isHdrConfidence, 1);
7751 }
7752
7753
7754
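    // The tuning blob below is laid out as: data version, then the sensor/VFE/
    // CPP/CAC/mod3 section sizes (mod3 forced to 0), followed by the sensor,
    // VFE, CPP and CAC payloads; it is published as an int32_t array under
    // QCAMERA3_TUNING_META_DATA_BLOB.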
Thierry Strudel3d639192016-09-09 11:52:26 -07007755 if (metadata->is_tuning_params_valid) {
7756 uint8_t tuning_meta_data_blob[sizeof(tuning_params_t)];
7757 uint8_t *data = (uint8_t *)&tuning_meta_data_blob[0];
7758 metadata->tuning_params.tuning_data_version = TUNING_DATA_VERSION;
7759
7760
7761 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_data_version),
7762 sizeof(uint32_t));
7763 data += sizeof(uint32_t);
7764
7765 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_sensor_data_size),
7766 sizeof(uint32_t));
7767 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
7768 data += sizeof(uint32_t);
7769
7770 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_vfe_data_size),
7771 sizeof(uint32_t));
7772 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
7773 data += sizeof(uint32_t);
7774
7775 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cpp_data_size),
7776 sizeof(uint32_t));
7777 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
7778 data += sizeof(uint32_t);
7779
7780 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_cac_data_size),
7781 sizeof(uint32_t));
7782 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
7783 data += sizeof(uint32_t);
7784
7785 metadata->tuning_params.tuning_mod3_data_size = 0;
7786 memcpy(data, ((uint8_t *)&metadata->tuning_params.tuning_mod3_data_size),
7787 sizeof(uint32_t));
7788 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
7789 data += sizeof(uint32_t);
7790
7791 size_t count = MIN(metadata->tuning_params.tuning_sensor_data_size,
7792 TUNING_SENSOR_DATA_MAX);
7793 memcpy(data, ((uint8_t *)&metadata->tuning_params.data),
7794 count);
7795 data += count;
7796
7797 count = MIN(metadata->tuning_params.tuning_vfe_data_size,
7798 TUNING_VFE_DATA_MAX);
7799 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_VFE_DATA_OFFSET]),
7800 count);
7801 data += count;
7802
7803 count = MIN(metadata->tuning_params.tuning_cpp_data_size,
7804 TUNING_CPP_DATA_MAX);
7805 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CPP_DATA_OFFSET]),
7806 count);
7807 data += count;
7808
7809 count = MIN(metadata->tuning_params.tuning_cac_data_size,
7810 TUNING_CAC_DATA_MAX);
7811 memcpy(data, ((uint8_t *)&metadata->tuning_params.data[TUNING_CAC_DATA_OFFSET]),
7812 count);
7813 data += count;
7814
7815 camMetadata.update(QCAMERA3_TUNING_META_DATA_BLOB,
7816 (int32_t *)(void *)tuning_meta_data_blob,
7817 (size_t)(data-tuning_meta_data_blob) / sizeof(uint32_t));
7818 }
7819
7820 IF_META_AVAILABLE(cam_neutral_col_point_t, neuColPoint,
7821 CAM_INTF_META_NEUTRAL_COL_POINT, metadata) {
7822 camMetadata.update(ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
7823 (camera_metadata_rational_t *)(void *)neuColPoint->neutral_col_point,
7824 NEUTRAL_COL_POINTS);
7825 }
7826
7827 IF_META_AVAILABLE(uint32_t, shadingMapMode, CAM_INTF_META_LENS_SHADING_MAP_MODE, metadata) {
7828 uint8_t fwk_shadingMapMode = (uint8_t) *shadingMapMode;
7829 camMetadata.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &fwk_shadingMapMode, 1);
7830 }
7831
7832 IF_META_AVAILABLE(cam_area_t, hAeRegions, CAM_INTF_META_AEC_ROI, metadata) {
7833 int32_t aeRegions[REGIONS_TUPLE_COUNT];
7834        // Adjust the AE region from the sensor output coordinate system to
7835        // the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007836 cam_rect_t hAeRect = hAeRegions->rect;
7837 mCropRegionMapper.toActiveArray(hAeRect.left, hAeRect.top,
7838 hAeRect.width, hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007839
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007840 convertToRegions(hAeRect, aeRegions, hAeRegions->weight);
Thierry Strudel3d639192016-09-09 11:52:26 -07007841 camMetadata.update(ANDROID_CONTROL_AE_REGIONS, aeRegions,
7842 REGIONS_TUPLE_COUNT);
7843 LOGD("Metadata : ANDROID_CONTROL_AE_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
7844 aeRegions[0], aeRegions[1], aeRegions[2], aeRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07007845 hAeRect.left, hAeRect.top, hAeRect.width,
7846 hAeRect.height);
Thierry Strudel3d639192016-09-09 11:52:26 -07007847 }
7848
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007849 if (!pendingRequest.focusStateSent) {
7850 if (pendingRequest.focusStateValid) {
7851 camMetadata.update(ANDROID_CONTROL_AF_STATE, &pendingRequest.focusState, 1);
7852 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", pendingRequest.focusState);
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007853 } else {
Shuzhen Wang181c57b2017-07-21 11:39:44 -07007854 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
7855 uint8_t fwk_afState = (uint8_t) *afState;
7856 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwk_afState, 1);
7857 LOGD("Metadata : ANDROID_CONTROL_AF_STATE %u", *afState);
7858 }
Shuzhen Wang0cb8cdf2016-07-14 11:56:49 -07007859 }
7860 }
7861
Thierry Strudel3d639192016-09-09 11:52:26 -07007862 IF_META_AVAILABLE(float, focusDistance, CAM_INTF_META_LENS_FOCUS_DISTANCE, metadata) {
7863 camMetadata.update(ANDROID_LENS_FOCUS_DISTANCE , focusDistance, 1);
7864 }
7865
7866 IF_META_AVAILABLE(float, focusRange, CAM_INTF_META_LENS_FOCUS_RANGE, metadata) {
7867 camMetadata.update(ANDROID_LENS_FOCUS_RANGE , focusRange, 2);
7868 }
7869
7870 IF_META_AVAILABLE(cam_af_lens_state_t, lensState, CAM_INTF_META_LENS_STATE, metadata) {
7871 uint8_t fwk_lensState = *lensState;
7872 camMetadata.update(ANDROID_LENS_STATE , &fwk_lensState, 1);
7873 }
7874
Thierry Strudel3d639192016-09-09 11:52:26 -07007875 IF_META_AVAILABLE(uint32_t, hal_ab_mode, CAM_INTF_PARM_ANTIBANDING, metadata) {
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007876 uint32_t ab_mode = *hal_ab_mode;
7877 if (ab_mode == CAM_ANTIBANDING_MODE_AUTO_60HZ ||
7878 ab_mode == CAM_ANTIBANDING_MODE_AUTO_50HZ) {
7879 ab_mode = CAM_ANTIBANDING_MODE_AUTO;
7880 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007881 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
Shuzhen Wangf6890e02016-08-12 14:28:54 -07007882 ab_mode);
Thierry Strudel3d639192016-09-09 11:52:26 -07007883 if (NAME_NOT_FOUND != val) {
7884 uint8_t fwk_ab_mode = (uint8_t)val;
7885 camMetadata.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &fwk_ab_mode, 1);
7886 }
7887 }
7888
7889 IF_META_AVAILABLE(uint32_t, bestshotMode, CAM_INTF_PARM_BESTSHOT_MODE, metadata) {
7890 int val = lookupFwkName(SCENE_MODES_MAP,
7891 METADATA_MAP_SIZE(SCENE_MODES_MAP), *bestshotMode);
7892 if (NAME_NOT_FOUND != val) {
7893 uint8_t fwkBestshotMode = (uint8_t)val;
7894 camMetadata.update(ANDROID_CONTROL_SCENE_MODE, &fwkBestshotMode, 1);
7895 LOGD("Metadata : ANDROID_CONTROL_SCENE_MODE");
7896 } else {
7897 LOGH("Metadata not found : ANDROID_CONTROL_SCENE_MODE");
7898 }
7899 }
7900
7901 IF_META_AVAILABLE(uint32_t, mode, CAM_INTF_META_MODE, metadata) {
7902 uint8_t fwk_mode = (uint8_t) *mode;
7903 camMetadata.update(ANDROID_CONTROL_MODE, &fwk_mode, 1);
7904 }
7905
7906    /* Constant metadata values to be updated */
7907 uint8_t hotPixelModeFast = ANDROID_HOT_PIXEL_MODE_FAST;
7908 camMetadata.update(ANDROID_HOT_PIXEL_MODE, &hotPixelModeFast, 1);
7909
7910 uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
7911 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
7912
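    // No hot pixels are reported: publish an empty (zero-count) hot pixel map
    // so the tag is still present in the result.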
7913 int32_t hotPixelMap[2];
7914 camMetadata.update(ANDROID_STATISTICS_HOT_PIXEL_MAP, &hotPixelMap[0], 0);
7915
7916 // CDS
7917 IF_META_AVAILABLE(int32_t, cds, CAM_INTF_PARM_CDS_MODE, metadata) {
7918 camMetadata.update(QCAMERA3_CDS_MODE, cds, 1);
7919 }
7920
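    // Track the staggered video HDR state in mCurrFeatureState so each ON/OFF
    // transition is logged exactly once (PROFILE_META_HDR_TOGGLED).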
Thierry Strudel04e026f2016-10-10 11:27:36 -07007921 IF_META_AVAILABLE(cam_sensor_hdr_type_t, vhdr, CAM_INTF_PARM_SENSOR_HDR, metadata) {
7922 int32_t fwk_hdr;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007923 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007924 if(*vhdr == CAM_SENSOR_HDR_OFF) {
7925 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_OFF;
7926 } else {
7927 fwk_hdr = QCAMERA3_VIDEO_HDR_MODE_ON;
7928 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08007929
7930 if(fwk_hdr != curr_hdr_state) {
7931 LOGH("PROFILE_META_HDR_TOGGLED value=%d", fwk_hdr);
7932 if(fwk_hdr)
7933 mCurrFeatureState |= CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7934 else
7935 mCurrFeatureState &= ~CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR;
7936 }
Thierry Strudel04e026f2016-10-10 11:27:36 -07007937 camMetadata.update(QCAMERA3_VIDEO_HDR_MODE, &fwk_hdr, 1);
7938 }
7939
Thierry Strudel54dc9782017-02-15 12:12:10 -08007940 //binning correction
7941 IF_META_AVAILABLE(cam_binning_correction_mode_t, bin_correction,
7942 CAM_INTF_META_BINNING_CORRECTION_MODE, metadata) {
7943 int32_t fwk_bin_mode = (int32_t) *bin_correction;
7944 camMetadata.update(QCAMERA3_BINNING_CORRECTION_MODE, &fwk_bin_mode, 1);
7945 }
7946
Thierry Strudel04e026f2016-10-10 11:27:36 -07007947 IF_META_AVAILABLE(cam_ir_mode_type_t, ir, CAM_INTF_META_IR_MODE, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007948 int32_t fwk_ir = (int32_t) *ir;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007949 int8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR ) != 0);
7950 int8_t is_ir_on = 0;
7951
7952        is_ir_on = (fwk_ir > 0) ? 1 : 0;
7953 if(is_ir_on != curr_ir_state) {
7954 LOGH("PROFILE_META_IR_TOGGLED value=%d", fwk_ir);
7955 if(is_ir_on)
7956 mCurrFeatureState |= CAM_QCOM_FEATURE_IR;
7957 else
7958 mCurrFeatureState &= ~CAM_QCOM_FEATURE_IR;
7959 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07007960 camMetadata.update(QCAMERA3_IR_MODE, &fwk_ir, 1);
Thierry Strudel04e026f2016-10-10 11:27:36 -07007961 }
7962
Thierry Strudel269c81a2016-10-12 12:13:59 -07007963 // AEC SPEED
7964 IF_META_AVAILABLE(float, aec, CAM_INTF_META_AEC_CONVERGENCE_SPEED, metadata) {
7965 camMetadata.update(QCAMERA3_AEC_CONVERGENCE_SPEED, aec, 1);
7966 }
7967
7968 // AWB SPEED
7969 IF_META_AVAILABLE(float, awb, CAM_INTF_META_AWB_CONVERGENCE_SPEED, metadata) {
7970 camMetadata.update(QCAMERA3_AWB_CONVERGENCE_SPEED, awb, 1);
7971 }
7972
Thierry Strudel3d639192016-09-09 11:52:26 -07007973 // TNR
7974 IF_META_AVAILABLE(cam_denoise_param_t, tnr, CAM_INTF_PARM_TEMPORAL_DENOISE, metadata) {
7975 uint8_t tnr_enable = tnr->denoise_enable;
7976 int32_t tnr_process_type = (int32_t)tnr->process_plates;
Thierry Strudel54dc9782017-02-15 12:12:10 -08007977 int8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0) ;
7978 int8_t is_tnr_on = 0;
7979
7980        is_tnr_on = (tnr_enable > 0) ? 1 : 0;
7981 if(is_tnr_on != curr_tnr_state) {
7982 LOGH("PROFILE_META_TNR_TOGGLED value=%d", tnr_enable);
7983 if(is_tnr_on)
7984 mCurrFeatureState |= CAM_QTI_FEATURE_SW_TNR;
7985 else
7986 mCurrFeatureState &= ~CAM_QTI_FEATURE_SW_TNR;
7987 }
Thierry Strudel3d639192016-09-09 11:52:26 -07007988
7989 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
7990 camMetadata.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
7991 }
7992
7993 // Reprocess crop data
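    // Only the crop entry that matches the reprocessable output stream is
    // forwarded to the framework. If the HAL has already reprocessed internally
    // (pprocDone), the full input stream dimensions are reported instead of the
    // daemon's crop.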
7994 IF_META_AVAILABLE(cam_crop_data_t, crop_data, CAM_INTF_META_CROP_DATA, metadata) {
7995 uint8_t cnt = crop_data->num_of_streams;
7996 if ( (0 >= cnt) || (cnt > MAX_NUM_STREAMS)) {
7997            // mm-qcamera-daemon only posts crop_data for streams
7998            // that are not linked to pproc, so missing crop metadata
7999            // is not necessarily an error case.
8000 LOGD("No valid crop metadata entries");
8001 } else {
8002 uint32_t reproc_stream_id;
8003 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8004 LOGD("No reprocessible stream found, ignore crop data");
8005 } else {
8006 int rc = NO_ERROR;
8007 Vector<int32_t> roi_map;
8008 int32_t *crop = new int32_t[cnt*4];
8009 if (NULL == crop) {
8010 rc = NO_MEMORY;
8011 }
8012 if (NO_ERROR == rc) {
8013 int32_t streams_found = 0;
8014 for (size_t i = 0; i < cnt; i++) {
8015 if (crop_data->crop_info[i].stream_id == reproc_stream_id) {
8016 if (pprocDone) {
8017 // HAL already does internal reprocessing,
8018 // either via reprocessing before JPEG encoding,
8019 // or offline postprocessing for pproc bypass case.
8020 crop[0] = 0;
8021 crop[1] = 0;
8022 crop[2] = mInputStreamInfo.dim.width;
8023 crop[3] = mInputStreamInfo.dim.height;
8024 } else {
8025 crop[0] = crop_data->crop_info[i].crop.left;
8026 crop[1] = crop_data->crop_info[i].crop.top;
8027 crop[2] = crop_data->crop_info[i].crop.width;
8028 crop[3] = crop_data->crop_info[i].crop.height;
8029 }
8030 roi_map.add(crop_data->crop_info[i].roi_map.left);
8031 roi_map.add(crop_data->crop_info[i].roi_map.top);
8032 roi_map.add(crop_data->crop_info[i].roi_map.width);
8033 roi_map.add(crop_data->crop_info[i].roi_map.height);
8034 streams_found++;
8035 LOGD("Adding reprocess crop data for stream %dx%d, %dx%d",
8036 crop[0], crop[1], crop[2], crop[3]);
8037 LOGD("Adding reprocess crop roi map for stream %dx%d, %dx%d",
8038 crop_data->crop_info[i].roi_map.left,
8039 crop_data->crop_info[i].roi_map.top,
8040 crop_data->crop_info[i].roi_map.width,
8041 crop_data->crop_info[i].roi_map.height);
8042 break;
8043
8044 }
8045 }
8046 camMetadata.update(QCAMERA3_CROP_COUNT_REPROCESS,
8047 &streams_found, 1);
8048 camMetadata.update(QCAMERA3_CROP_REPROCESS,
8049 crop, (size_t)(streams_found * 4));
8050 if (roi_map.array()) {
8051 camMetadata.update(QCAMERA3_CROP_ROI_MAP_REPROCESS,
8052 roi_map.array(), roi_map.size());
8053 }
8054 }
8055 if (crop) {
8056 delete [] crop;
8057 }
8058 }
8059 }
8060 }
8061
8062 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
8063        // Regardless of whether CAC is supported, CTS expects the CAC result to be
8064        // non-NULL, so hardcode the CAC result to OFF mode.
8065 uint8_t fwkCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8066 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &fwkCacMode, 1);
8067 } else {
8068 IF_META_AVAILABLE(cam_aberration_mode_t, cacMode, CAM_INTF_PARM_CAC, metadata) {
8069 int val = lookupFwkName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
8070 *cacMode);
8071 if (NAME_NOT_FOUND != val) {
8072 uint8_t resultCacMode = (uint8_t)val;
8073 // check whether CAC result from CB is equal to Framework set CAC mode
8074 // If not equal then set the CAC mode came in corresponding request
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008075 if (pendingRequest.fwkCacMode != resultCacMode) {
8076 resultCacMode = pendingRequest.fwkCacMode;
Thierry Strudel3d639192016-09-09 11:52:26 -07008077 }
Thierry Strudel54dc9782017-02-15 12:12:10 -08008078 //Check if CAC is disabled by property
8079 if (m_cacModeDisabled) {
8080 resultCacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
8081 }
8082
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008083 LOGD("fwk_cacMode=%d resultCacMode=%d", pendingRequest.fwkCacMode, resultCacMode);
Thierry Strudel3d639192016-09-09 11:52:26 -07008084 camMetadata.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &resultCacMode, 1);
8085 } else {
8086 LOGE("Invalid CAC camera parameter: %d", *cacMode);
8087 }
8088 }
8089 }
8090
8091 // Post blob of cam_cds_data through vendor tag.
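    // The override blob always carries a single stream entry: the session-wide
    // CDS enable plus the CDS enable of the reprocessable stream, if one exists.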
8092 IF_META_AVAILABLE(cam_cds_data_t, cdsInfo, CAM_INTF_META_CDS_DATA, metadata) {
8093 uint8_t cnt = cdsInfo->num_of_streams;
8094 cam_cds_data_t cdsDataOverride;
8095 memset(&cdsDataOverride, 0, sizeof(cdsDataOverride));
8096 cdsDataOverride.session_cds_enable = cdsInfo->session_cds_enable;
8097 cdsDataOverride.num_of_streams = 1;
8098 if ((0 < cnt) && (cnt <= MAX_NUM_STREAMS)) {
8099 uint32_t reproc_stream_id;
8100 if ( NO_ERROR != getReprocessibleOutputStreamId(reproc_stream_id)) {
8101 LOGD("No reprocessible stream found, ignore cds data");
8102 } else {
8103 for (size_t i = 0; i < cnt; i++) {
8104 if (cdsInfo->cds_info[i].stream_id ==
8105 reproc_stream_id) {
8106 cdsDataOverride.cds_info[0].cds_enable =
8107 cdsInfo->cds_info[i].cds_enable;
8108 break;
8109 }
8110 }
8111 }
8112 } else {
8113 LOGD("Invalid stream count %d in CDS_DATA", cnt);
8114 }
8115 camMetadata.update(QCAMERA3_CDS_INFO,
8116 (uint8_t *)&cdsDataOverride,
8117 sizeof(cam_cds_data_t));
8118 }
8119
8120 // Ldaf calibration data
8121 if (!mLdafCalibExist) {
8122 IF_META_AVAILABLE(uint32_t, ldafCalib,
8123 CAM_INTF_META_LDAF_EXIF, metadata) {
8124 mLdafCalibExist = true;
8125 mLdafCalib[0] = ldafCalib[0];
8126 mLdafCalib[1] = ldafCalib[1];
8127 LOGD("ldafCalib[0] is %d, ldafCalib[1] is %d",
8128 ldafCalib[0], ldafCalib[1]);
8129 }
8130 }
8131
Thierry Strudel54dc9782017-02-15 12:12:10 -08008132 // EXIF debug data through vendor tag
8133 /*
8134 * Mobicat Mask can assume 3 values:
8135 * 1 refers to Mobicat data,
8136 * 2 refers to Stats Debug and Exif Debug Data
8137 * 3 refers to Mobicat and Stats Debug Data
8138 * We want to make sure that we are sending Exif debug data
8139 * only when Mobicat Mask is 2.
8140 */
8141 if ((mExifParams.debug_params != NULL) && (getMobicatMask() == 2)) {
8142 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB,
8143 (uint8_t *)(void *)mExifParams.debug_params,
8144 sizeof(mm_jpeg_debug_exif_params_t));
8145 }
8146
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008147 // Reprocess and DDM debug data through vendor tag
8148 cam_reprocess_info_t repro_info;
8149 memset(&repro_info, 0, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008150 IF_META_AVAILABLE(cam_stream_crop_info_t, sensorCropInfo,
8151 CAM_INTF_META_SNAP_CROP_INFO_SENSOR, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008152 memcpy(&(repro_info.sensor_crop_info), sensorCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008153 }
8154 IF_META_AVAILABLE(cam_stream_crop_info_t, camifCropInfo,
8155 CAM_INTF_META_SNAP_CROP_INFO_CAMIF, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008156 memcpy(&(repro_info.camif_crop_info), camifCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008157 }
8158 IF_META_AVAILABLE(cam_stream_crop_info_t, ispCropInfo,
8159 CAM_INTF_META_SNAP_CROP_INFO_ISP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008160 memcpy(&(repro_info.isp_crop_info), ispCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008161 }
8162 IF_META_AVAILABLE(cam_stream_crop_info_t, cppCropInfo,
8163 CAM_INTF_META_SNAP_CROP_INFO_CPP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008164 memcpy(&(repro_info.cpp_crop_info), cppCropInfo, sizeof(cam_stream_crop_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008165 }
8166 IF_META_AVAILABLE(cam_focal_length_ratio_t, ratio,
8167 CAM_INTF_META_AF_FOCAL_LENGTH_RATIO, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008168 memcpy(&(repro_info.af_focal_length_ratio), ratio, sizeof(cam_focal_length_ratio_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008169 }
8170 IF_META_AVAILABLE(int32_t, flip, CAM_INTF_PARM_FLIP, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008171 memcpy(&(repro_info.pipeline_flip), flip, sizeof(int32_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008172 }
8173 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
8174 CAM_INTF_PARM_ROTATION, metadata) {
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008175 memcpy(&(repro_info.rotation_info), rotationInfo, sizeof(cam_rotation_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008176 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008177 IF_META_AVAILABLE(cam_area_t, afRoi, CAM_INTF_META_AF_ROI, metadata) {
8178 memcpy(&(repro_info.af_roi), afRoi, sizeof(cam_area_t));
8179 }
8180 IF_META_AVAILABLE(cam_dyn_img_data_t, dynMask, CAM_INTF_META_IMG_DYN_FEAT, metadata) {
8181 memcpy(&(repro_info.dyn_mask), dynMask, sizeof(cam_dyn_img_data_t));
8182 }
8183 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB,
8184 (uint8_t *)&repro_info, sizeof(cam_reprocess_info_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07008185
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008186 // INSTANT AEC MODE
8187 IF_META_AVAILABLE(uint8_t, instant_aec_mode,
8188 CAM_INTF_PARM_INSTANT_AEC, metadata) {
8189 camMetadata.update(QCAMERA3_INSTANT_AEC_MODE, instant_aec_mode, 1);
8190 }
8191
Shuzhen Wange763e802016-03-31 10:24:29 -07008192 // AF scene change
8193 IF_META_AVAILABLE(uint8_t, afSceneChange, CAM_INTF_META_AF_SCENE_CHANGE, metadata) {
8194 camMetadata.update(NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE, afSceneChange, 1);
8195 }
8196
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -07008197 // Enable ZSL
8198 if (enableZsl != nullptr) {
8199 uint8_t value = *enableZsl ?
8200 ANDROID_CONTROL_ENABLE_ZSL_TRUE : ANDROID_CONTROL_ENABLE_ZSL_FALSE;
8201 camMetadata.update(ANDROID_CONTROL_ENABLE_ZSL, &value, 1);
8202 }
8203
Xu Han821ea9c2017-05-23 09:00:40 -07008204 // OIS Data
8205 IF_META_AVAILABLE(cam_frame_ois_info_t, frame_ois_data, CAM_INTF_META_FRAME_OIS_DATA, metadata) {
8206 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_VSYNC,
8207 &(frame_ois_data->frame_sof_timestamp_vsync), 1);
8208 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_FRAME_TIMESTAMP_BOOTTIME,
8209 &(frame_ois_data->frame_sof_timestamp_boottime), 1);
8210 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_TIMESTAMPS_BOOTTIME,
8211 frame_ois_data->ois_sample_timestamp_boottime, frame_ois_data->num_ois_sample);
8212 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_X,
8213 frame_ois_data->ois_sample_shift_x, frame_ois_data->num_ois_sample);
8214 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_Y,
8215 frame_ois_data->ois_sample_shift_y, frame_ois_data->num_ois_sample);
Xue Tu2c3e9142017-08-18 16:23:52 -07008216 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_X,
8217 frame_ois_data->ois_sample_shift_pixel_x, frame_ois_data->num_ois_sample);
8218 camMetadata.update(NEXUS_EXPERIMENTAL_2017_OIS_SHIFT_PIXEL_Y,
8219 frame_ois_data->ois_sample_shift_pixel_y, frame_ois_data->num_ois_sample);
Xu Han821ea9c2017-05-23 09:00:40 -07008220 }
8221
Thierry Strudel3d639192016-09-09 11:52:26 -07008222 resultMetadata = camMetadata.release();
8223 return resultMetadata;
8224}
8225
8226/*===========================================================================
8227 * FUNCTION : saveExifParams
8228 *
8229 * DESCRIPTION: Cache 3A and stats EXIF debug parameters from the metadata callback
8230 *
8231 * PARAMETERS :
8232 * @metadata : metadata information from callback
8233 *
8234 * RETURN : none
8235 *
8236 *==========================================================================*/
8237void QCamera3HardwareInterface::saveExifParams(metadata_buffer_t *metadata)
8238{
8239 IF_META_AVAILABLE(cam_ae_exif_debug_t, ae_exif_debug_params,
8240 CAM_INTF_META_EXIF_DEBUG_AE, metadata) {
8241 if (mExifParams.debug_params) {
8242 mExifParams.debug_params->ae_debug_params = *ae_exif_debug_params;
8243 mExifParams.debug_params->ae_debug_params_valid = TRUE;
8244 }
8245 }
8246 IF_META_AVAILABLE(cam_awb_exif_debug_t,awb_exif_debug_params,
8247 CAM_INTF_META_EXIF_DEBUG_AWB, metadata) {
8248 if (mExifParams.debug_params) {
8249 mExifParams.debug_params->awb_debug_params = *awb_exif_debug_params;
8250 mExifParams.debug_params->awb_debug_params_valid = TRUE;
8251 }
8252 }
8253 IF_META_AVAILABLE(cam_af_exif_debug_t,af_exif_debug_params,
8254 CAM_INTF_META_EXIF_DEBUG_AF, metadata) {
8255 if (mExifParams.debug_params) {
8256 mExifParams.debug_params->af_debug_params = *af_exif_debug_params;
8257 mExifParams.debug_params->af_debug_params_valid = TRUE;
8258 }
8259 }
8260 IF_META_AVAILABLE(cam_asd_exif_debug_t, asd_exif_debug_params,
8261 CAM_INTF_META_EXIF_DEBUG_ASD, metadata) {
8262 if (mExifParams.debug_params) {
8263 mExifParams.debug_params->asd_debug_params = *asd_exif_debug_params;
8264 mExifParams.debug_params->asd_debug_params_valid = TRUE;
8265 }
8266 }
8267 IF_META_AVAILABLE(cam_stats_buffer_exif_debug_t,stats_exif_debug_params,
8268 CAM_INTF_META_EXIF_DEBUG_STATS, metadata) {
8269 if (mExifParams.debug_params) {
8270 mExifParams.debug_params->stats_debug_params = *stats_exif_debug_params;
8271 mExifParams.debug_params->stats_debug_params_valid = TRUE;
8272 }
8273 }
8274 IF_META_AVAILABLE(cam_bestats_buffer_exif_debug_t,bestats_exif_debug_params,
8275 CAM_INTF_META_EXIF_DEBUG_BESTATS, metadata) {
8276 if (mExifParams.debug_params) {
8277 mExifParams.debug_params->bestats_debug_params = *bestats_exif_debug_params;
8278 mExifParams.debug_params->bestats_debug_params_valid = TRUE;
8279 }
8280 }
8281 IF_META_AVAILABLE(cam_bhist_buffer_exif_debug_t, bhist_exif_debug_params,
8282 CAM_INTF_META_EXIF_DEBUG_BHIST, metadata) {
8283 if (mExifParams.debug_params) {
8284 mExifParams.debug_params->bhist_debug_params = *bhist_exif_debug_params;
8285 mExifParams.debug_params->bhist_debug_params_valid = TRUE;
8286 }
8287 }
8288 IF_META_AVAILABLE(cam_q3a_tuning_info_t, q3a_tuning_exif_debug_params,
8289 CAM_INTF_META_EXIF_DEBUG_3A_TUNING, metadata) {
8290 if (mExifParams.debug_params) {
8291 mExifParams.debug_params->q3a_tuning_debug_params = *q3a_tuning_exif_debug_params;
8292 mExifParams.debug_params->q3a_tuning_debug_params_valid = TRUE;
8293 }
8294 }
8295}
8296
8297/*===========================================================================
8298 * FUNCTION : get3AExifParams
8299 *
8300 * DESCRIPTION: Return the cached EXIF parameters, including 3A debug data
8301 *
8302 * PARAMETERS : none
8303 *
8304 *
8305 * RETURN : mm_jpeg_exif_params_t
8306 *
8307 *==========================================================================*/
8308mm_jpeg_exif_params_t QCamera3HardwareInterface::get3AExifParams()
8309{
8310 return mExifParams;
8311}
8312
8313/*===========================================================================
8314 * FUNCTION : translateCbUrgentMetadataToResultMetadata
8315 *
8316 * DESCRIPTION:
8317 *
8318 * PARAMETERS :
8319 * @metadata : metadata information from callback
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008320 * @lastUrgentMetadataInBatch: Boolean to indicate whether this is the last
8321 * urgent metadata in a batch. Always true for
8322 * non-batch mode.
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008323 * @frame_number : frame number for this urgent metadata
Shuzhen Wang485e2442017-08-02 12:21:08 -07008324 * @isJumpstartMetadata: Whether this is a partial metadata for jumpstart,
8325 * i.e. even though it doesn't map to a valid partial
8326 * frame number, its metadata entries should be kept.
Thierry Strudel3d639192016-09-09 11:52:26 -07008327 * RETURN : camera_metadata_t*
8328 * metadata in a format specified by fwk
8329 *==========================================================================*/
8330camera_metadata_t*
8331QCamera3HardwareInterface::translateCbUrgentMetadataToResultMetadata
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008332 (metadata_buffer_t *metadata, bool lastUrgentMetadataInBatch,
Shuzhen Wang485e2442017-08-02 12:21:08 -07008333 uint32_t frame_number, bool isJumpstartMetadata)
Thierry Strudel3d639192016-09-09 11:52:26 -07008334{
8335 CameraMetadata camMetadata;
8336 camera_metadata_t *resultMetadata;
8337
Shuzhen Wang485e2442017-08-02 12:21:08 -07008338 if (!lastUrgentMetadataInBatch && !isJumpstartMetadata) {
Shuzhen Wang94ddf072017-03-12 19:47:23 -07008339 /* In batch mode, use empty metadata if this is not the last in batch
8340 */
8341 resultMetadata = allocate_camera_metadata(0, 0);
8342 return resultMetadata;
8343 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008344
8345 IF_META_AVAILABLE(uint32_t, whiteBalanceState, CAM_INTF_META_AWB_STATE, metadata) {
8346 uint8_t fwk_whiteBalanceState = (uint8_t) *whiteBalanceState;
8347 camMetadata.update(ANDROID_CONTROL_AWB_STATE, &fwk_whiteBalanceState, 1);
8348 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_STATE %u", *whiteBalanceState);
8349 }
8350
8351 IF_META_AVAILABLE(cam_trigger_t, aecTrigger, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER, metadata) {
8352 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER,
8353 &aecTrigger->trigger, 1);
8354 camMetadata.update(ANDROID_CONTROL_AE_PRECAPTURE_ID,
8355 &aecTrigger->trigger_id, 1);
8356 LOGD("urgent Metadata : CAM_INTF_META_AEC_PRECAPTURE_TRIGGER: %d",
8357 aecTrigger->trigger);
8358 LOGD("urgent Metadata : ANDROID_CONTROL_AE_PRECAPTURE_ID: %d",
8359 aecTrigger->trigger_id);
8360 }
8361
8362 IF_META_AVAILABLE(uint32_t, ae_state, CAM_INTF_META_AEC_STATE, metadata) {
8363 uint8_t fwk_ae_state = (uint8_t) *ae_state;
8364 camMetadata.update(ANDROID_CONTROL_AE_STATE, &fwk_ae_state, 1);
8365 LOGD("urgent Metadata : ANDROID_CONTROL_AE_STATE %u", *ae_state);
8366 }
8367
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008368 IF_META_AVAILABLE(uint32_t, focusMode, CAM_INTF_PARM_FOCUS_MODE, metadata) {
8369 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP), *focusMode);
8370 if (NAME_NOT_FOUND != val) {
8371 uint8_t fwkAfMode = (uint8_t)val;
8372 camMetadata.update(ANDROID_CONTROL_AF_MODE, &fwkAfMode, 1);
8373 LOGD("urgent Metadata : ANDROID_CONTROL_AF_MODE %d", val);
8374 } else {
8375 LOGH("urgent Metadata not found : ANDROID_CONTROL_AF_MODE %d",
8376 val);
8377 }
Thierry Strudel3d639192016-09-09 11:52:26 -07008378 }
8379
Shuzhen Wang181c57b2017-07-21 11:39:44 -07008380 IF_META_AVAILABLE(cam_trigger_t, af_trigger, CAM_INTF_META_AF_TRIGGER, metadata) {
8381 LOGD("urgent Metadata : CAM_INTF_META_AF_TRIGGER = %d",
8382 af_trigger->trigger);
8383 LOGD("urgent Metadata : ANDROID_CONTROL_AF_TRIGGER_ID = %d",
8384 af_trigger->trigger_id);
8385
8386 IF_META_AVAILABLE(uint32_t, afState, CAM_INTF_META_AF_STATE, metadata) {
8387 mAfTrigger = *af_trigger;
8388 uint32_t fwk_AfState = (uint32_t) *afState;
8389
8390 // If this is the result for a new trigger, check if there is new early
8391 // af state. If there is, use the last af state for all results
8392 // preceding current partial frame number.
8393 for (auto & pendingRequest : mPendingRequestsList) {
8394 if (pendingRequest.frame_number < frame_number) {
8395 pendingRequest.focusStateValid = true;
8396 pendingRequest.focusState = fwk_AfState;
8397 } else if (pendingRequest.frame_number == frame_number) {
8398 IF_META_AVAILABLE(uint32_t, earlyAfState, CAM_INTF_META_EARLY_AF_STATE, metadata) {
8399 // Check if early AF state for trigger exists. If yes, send AF state as
8400 // partial result for better latency.
8401 uint8_t fwkEarlyAfState = (uint8_t) *earlyAfState;
8402 pendingRequest.focusStateSent = true;
8403 camMetadata.update(ANDROID_CONTROL_AF_STATE, &fwkEarlyAfState, 1);
8404 LOGD("urgent Metadata(%d) : ANDROID_CONTROL_AF_STATE %u",
8405 frame_number, fwkEarlyAfState);
8406 }
8407 }
8408 }
8409 }
8410 }
8411 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER,
8412 &mAfTrigger.trigger, 1);
8413 camMetadata.update(ANDROID_CONTROL_AF_TRIGGER_ID, &mAfTrigger.trigger_id, 1);
8414
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008415 IF_META_AVAILABLE(cam_area_t, hAfRegions, CAM_INTF_META_AF_ROI, metadata) {
8416 /*af regions*/
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008417 cam_rect_t hAfRect = hAfRegions->rect;
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008418 int32_t afRegions[REGIONS_TUPLE_COUNT];
8419        // Adjust the AF region from the sensor output coordinate system to
8420        // the active array coordinate system.
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008421 mCropRegionMapper.toActiveArray(hAfRect.left, hAfRect.top,
8422 hAfRect.width, hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008423
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008424 convertToRegions(hAfRect, afRegions, hAfRegions->weight);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008425 camMetadata.update(ANDROID_CONTROL_AF_REGIONS, afRegions,
8426 REGIONS_TUPLE_COUNT);
8427 LOGD("Metadata : ANDROID_CONTROL_AF_REGIONS: FWK: [%d,%d,%d,%d] HAL: [%d,%d,%d,%d]",
8428 afRegions[0], afRegions[1], afRegions[2], afRegions[3],
Shuzhen Wang91c14e82017-08-21 17:56:57 -07008429 hAfRect.left, hAfRect.top, hAfRect.width,
8430 hAfRect.height);
Shuzhen Wang6ce35e62017-03-27 18:00:41 -07008431 }
8432
Shuzhen Wangcc386c52017-03-29 09:28:08 -07008433 // AF region confidence
8434 IF_META_AVAILABLE(int32_t, afRegionConfidence, CAM_INTF_META_AF_REGIONS_CONFIDENCE, metadata) {
8435 camMetadata.update(NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE, afRegionConfidence, 1);
8436 }
8437
Thierry Strudel3d639192016-09-09 11:52:26 -07008438 IF_META_AVAILABLE(int32_t, whiteBalance, CAM_INTF_PARM_WHITE_BALANCE, metadata) {
8439 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
8440 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP), *whiteBalance);
8441 if (NAME_NOT_FOUND != val) {
8442 uint8_t fwkWhiteBalanceMode = (uint8_t)val;
8443 camMetadata.update(ANDROID_CONTROL_AWB_MODE, &fwkWhiteBalanceMode, 1);
8444 LOGD("urgent Metadata : ANDROID_CONTROL_AWB_MODE %d", val);
8445 } else {
8446 LOGH("urgent Metadata not found : ANDROID_CONTROL_AWB_MODE");
8447 }
8448 }
8449
8450 uint8_t fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8451 uint32_t aeMode = CAM_AE_MODE_MAX;
8452 int32_t flashMode = CAM_FLASH_MODE_MAX;
8453 int32_t redeye = -1;
8454 IF_META_AVAILABLE(uint32_t, pAeMode, CAM_INTF_META_AEC_MODE, metadata) {
8455 aeMode = *pAeMode;
8456 }
8457 IF_META_AVAILABLE(int32_t, pFlashMode, CAM_INTF_PARM_LED_MODE, metadata) {
8458 flashMode = *pFlashMode;
8459 }
8460 IF_META_AVAILABLE(int32_t, pRedeye, CAM_INTF_PARM_REDEYE_REDUCTION, metadata) {
8461 redeye = *pRedeye;
8462 }
8463
8464 if (1 == redeye) {
8465 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE;
8466 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8467 } else if ((CAM_FLASH_MODE_AUTO == flashMode) || (CAM_FLASH_MODE_ON == flashMode)) {
8468 int val = lookupFwkName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
8469 flashMode);
8470 if (NAME_NOT_FOUND != val) {
8471 fwk_aeMode = (uint8_t)val;
8472 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8473 } else {
8474 LOGE("Unsupported flash mode %d", flashMode);
8475 }
8476 } else if (aeMode == CAM_AE_MODE_ON) {
8477 fwk_aeMode = ANDROID_CONTROL_AE_MODE_ON;
8478 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
8479 } else if (aeMode == CAM_AE_MODE_OFF) {
8480 fwk_aeMode = ANDROID_CONTROL_AE_MODE_OFF;
8481 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -08008482 } else if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
8483 fwk_aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
8484 camMetadata.update(ANDROID_CONTROL_AE_MODE, &fwk_aeMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07008485 } else {
8486 LOGE("Not enough info to deduce ANDROID_CONTROL_AE_MODE redeye:%d, "
8487 "flashMode:%d, aeMode:%u!!!",
8488 redeye, flashMode, aeMode);
8489 }
Thierry Strudel295a0ca2016-11-03 18:38:47 -07008490 if (mInstantAEC) {
8491        // Increment frame index count until a bound is reached for instant AEC.
8492 mInstantAecFrameIdxCount++;
8493 IF_META_AVAILABLE(cam_3a_params_t, ae_params,
8494 CAM_INTF_META_AEC_INFO, metadata) {
8495 LOGH("ae_params->settled = %d",ae_params->settled);
8496 // If AEC settled, or if number of frames reached bound value,
8497 // should reset instant AEC.
8498 if (ae_params->settled ||
8499 (mInstantAecFrameIdxCount > mAecSkipDisplayFrameBound)) {
8500 LOGH("AEC settled or Frames reached instantAEC bound, resetting instantAEC");
8501 mInstantAEC = false;
8502 mResetInstantAEC = true;
8503 mInstantAecFrameIdxCount = 0;
8504 }
8505 }
8506 }
Shuzhen Wang3569d4a2017-09-04 19:10:28 -07008507
8508 IF_META_AVAILABLE(int32_t, af_tof_confidence,
8509 CAM_INTF_META_AF_TOF_CONFIDENCE, metadata) {
8510 IF_META_AVAILABLE(int32_t, af_tof_distance,
8511 CAM_INTF_META_AF_TOF_DISTANCE, metadata) {
8512 int32_t fwk_af_tof_confidence = *af_tof_confidence;
8513 int32_t fwk_af_tof_distance = *af_tof_distance;
8514 if (fwk_af_tof_confidence == 1) {
8515 mSceneDistance = fwk_af_tof_distance;
8516 } else {
8517 mSceneDistance = -1;
8518 }
8519 LOGD("tof_distance %d, tof_confidence %d, mSceneDistance %d",
8520 fwk_af_tof_distance, fwk_af_tof_confidence, mSceneDistance);
8521 }
8522 }
8523 camMetadata.update(NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE, &mSceneDistance, 1);
8524
Thierry Strudel3d639192016-09-09 11:52:26 -07008525 resultMetadata = camMetadata.release();
8526 return resultMetadata;
8527}
8528
8529/*===========================================================================
8530 * FUNCTION : dumpMetadataToFile
8531 *
8532 * DESCRIPTION: Dumps tuning metadata to file system
8533 *
8534 * PARAMETERS :
8535 * @meta : tuning metadata
8536 * @dumpFrameCount : current dump frame count
8537 * @enabled : Enable mask
 * @type : dump type string used in the file name
 * @frameNumber : frame number of the dumped metadata
8538 *
8539 *==========================================================================*/
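// Each dump is written under QCAMERA_DUMP_FRM_LOCATION with a name of the form
// <YYYYMMDDHHMMSS><dumpFrameCount>m_<type>_<frameNumber>.bin, using the same
// header layout (version + section sizes + payloads) as the tuning blob
// published above.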
8540void QCamera3HardwareInterface::dumpMetadataToFile(tuning_params_t &meta,
8541 uint32_t &dumpFrameCount,
8542 bool enabled,
8543 const char *type,
8544 uint32_t frameNumber)
8545{
8546 //Some sanity checks
8547 if (meta.tuning_sensor_data_size > TUNING_SENSOR_DATA_MAX) {
8548 LOGE("Tuning sensor data size bigger than expected %d: %d",
8549 meta.tuning_sensor_data_size,
8550 TUNING_SENSOR_DATA_MAX);
8551 return;
8552 }
8553
8554 if (meta.tuning_vfe_data_size > TUNING_VFE_DATA_MAX) {
8555 LOGE("Tuning VFE data size bigger than expected %d: %d",
8556 meta.tuning_vfe_data_size,
8557 TUNING_VFE_DATA_MAX);
8558 return;
8559 }
8560
8561 if (meta.tuning_cpp_data_size > TUNING_CPP_DATA_MAX) {
8562 LOGE("Tuning CPP data size bigger than expected %d: %d",
8563 meta.tuning_cpp_data_size,
8564 TUNING_CPP_DATA_MAX);
8565 return;
8566 }
8567
8568 if (meta.tuning_cac_data_size > TUNING_CAC_DATA_MAX) {
8569 LOGE("Tuning CAC data size bigger than expected %d: %d",
8570 meta.tuning_cac_data_size,
8571 TUNING_CAC_DATA_MAX);
8572 return;
8573 }
8574 //
8575
8576 if(enabled){
8577 char timeBuf[FILENAME_MAX];
8578 char buf[FILENAME_MAX];
8579 memset(buf, 0, sizeof(buf));
8580 memset(timeBuf, 0, sizeof(timeBuf));
8581 time_t current_time;
8582 struct tm * timeinfo;
8583 time (&current_time);
8584 timeinfo = localtime (&current_time);
8585 if (timeinfo != NULL) {
8586 strftime (timeBuf, sizeof(timeBuf),
8587 QCAMERA_DUMP_FRM_LOCATION"%Y%m%d%H%M%S", timeinfo);
8588 }
8589 String8 filePath(timeBuf);
8590 snprintf(buf,
8591 sizeof(buf),
8592 "%dm_%s_%d.bin",
8593 dumpFrameCount,
8594 type,
8595 frameNumber);
8596 filePath.append(buf);
8597 int file_fd = open(filePath.string(), O_RDWR | O_CREAT, 0777);
8598 if (file_fd >= 0) {
8599 ssize_t written_len = 0;
8600 meta.tuning_data_version = TUNING_DATA_VERSION;
8601 void *data = (void *)((uint8_t *)&meta.tuning_data_version);
8602 written_len += write(file_fd, data, sizeof(uint32_t));
8603 data = (void *)((uint8_t *)&meta.tuning_sensor_data_size);
8604 LOGD("tuning_sensor_data_size %d",(int)(*(int *)data));
8605 written_len += write(file_fd, data, sizeof(uint32_t));
8606 data = (void *)((uint8_t *)&meta.tuning_vfe_data_size);
8607 LOGD("tuning_vfe_data_size %d",(int)(*(int *)data));
8608 written_len += write(file_fd, data, sizeof(uint32_t));
8609 data = (void *)((uint8_t *)&meta.tuning_cpp_data_size);
8610 LOGD("tuning_cpp_data_size %d",(int)(*(int *)data));
8611 written_len += write(file_fd, data, sizeof(uint32_t));
8612 data = (void *)((uint8_t *)&meta.tuning_cac_data_size);
8613 LOGD("tuning_cac_data_size %d",(int)(*(int *)data));
8614 written_len += write(file_fd, data, sizeof(uint32_t));
8615 meta.tuning_mod3_data_size = 0;
8616 data = (void *)((uint8_t *)&meta.tuning_mod3_data_size);
8617 LOGD("tuning_mod3_data_size %d",(int)(*(int *)data));
8618 written_len += write(file_fd, data, sizeof(uint32_t));
8619 size_t total_size = meta.tuning_sensor_data_size;
8620 data = (void *)((uint8_t *)&meta.data);
8621 written_len += write(file_fd, data, total_size);
8622 total_size = meta.tuning_vfe_data_size;
8623 data = (void *)((uint8_t *)&meta.data[TUNING_VFE_DATA_OFFSET]);
8624 written_len += write(file_fd, data, total_size);
8625 total_size = meta.tuning_cpp_data_size;
8626 data = (void *)((uint8_t *)&meta.data[TUNING_CPP_DATA_OFFSET]);
8627 written_len += write(file_fd, data, total_size);
8628 total_size = meta.tuning_cac_data_size;
8629 data = (void *)((uint8_t *)&meta.data[TUNING_CAC_DATA_OFFSET]);
8630 written_len += write(file_fd, data, total_size);
8631 close(file_fd);
8632        } else {
8633 LOGE("fail to open file for metadata dumping");
8634 }
8635 }
8636}
8637
8638/*===========================================================================
8639 * FUNCTION : cleanAndSortStreamInfo
8640 *
8641 * DESCRIPTION: helper method to clean up invalid streams in stream_info,
8642 * and sort them such that raw streams are at the end of the list.
8643 * This is a workaround for a camera daemon constraint.
8644 *
8645 * PARAMETERS : None
8646 *
8647 *==========================================================================*/
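// Example: a stream set ordered {RAW16, preview, snapshot} comes out as
// {preview, snapshot, RAW16} once any INVALID entries have been deleted.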
8648void QCamera3HardwareInterface::cleanAndSortStreamInfo()
8649{
8650 List<stream_info_t *> newStreamInfo;
8651
8652 /*clean up invalid streams*/
8653 for (List<stream_info_t*>::iterator it=mStreamInfo.begin();
8654 it != mStreamInfo.end();) {
8655 if(((*it)->status) == INVALID){
8656 QCamera3Channel *channel = (QCamera3Channel*)(*it)->stream->priv;
8657 delete channel;
8658 free(*it);
8659 it = mStreamInfo.erase(it);
8660 } else {
8661 it++;
8662 }
8663 }
8664
8665 // Move preview/video/callback/snapshot streams into newList
8666 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8667 it != mStreamInfo.end();) {
8668 if ((*it)->stream->format != HAL_PIXEL_FORMAT_RAW_OPAQUE &&
8669 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW10 &&
8670 (*it)->stream->format != HAL_PIXEL_FORMAT_RAW16) {
8671 newStreamInfo.push_back(*it);
8672 it = mStreamInfo.erase(it);
8673 } else
8674 it++;
8675 }
8676 // Move raw streams into newList
8677 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
8678 it != mStreamInfo.end();) {
8679 newStreamInfo.push_back(*it);
8680 it = mStreamInfo.erase(it);
8681 }
8682
8683 mStreamInfo = newStreamInfo;
8684}
8685
8686/*===========================================================================
8687 * FUNCTION : extractJpegMetadata
8688 *
8689 * DESCRIPTION: helper method to extract Jpeg metadata from capture request.
8690 * JPEG metadata is cached in HAL, and return as part of capture
8691 * result when metadata is returned from camera daemon.
8692 *
8693 * PARAMETERS : @jpegMetadata: jpeg metadata to be extracted
8694 * @request: capture request
8695 *
8696 *==========================================================================*/
8697void QCamera3HardwareInterface::extractJpegMetadata(
8698 CameraMetadata& jpegMetadata,
8699 const camera3_capture_request_t *request)
8700{
8701 CameraMetadata frame_settings;
8702 frame_settings = request->settings;
8703
8704 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES))
8705 jpegMetadata.update(ANDROID_JPEG_GPS_COORDINATES,
8706 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).data.d,
8707 frame_settings.find(ANDROID_JPEG_GPS_COORDINATES).count);
8708
8709 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD))
8710 jpegMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD,
8711 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8,
8712 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).count);
8713
8714 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP))
8715 jpegMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP,
8716 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64,
8717 frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).count);
8718
8719 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION))
8720 jpegMetadata.update(ANDROID_JPEG_ORIENTATION,
8721 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32,
8722 frame_settings.find(ANDROID_JPEG_ORIENTATION).count);
8723
8724 if (frame_settings.exists(ANDROID_JPEG_QUALITY))
8725 jpegMetadata.update(ANDROID_JPEG_QUALITY,
8726 frame_settings.find(ANDROID_JPEG_QUALITY).data.u8,
8727 frame_settings.find(ANDROID_JPEG_QUALITY).count);
8728
8729 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY))
8730 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY,
8731 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8,
8732 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).count);
8733
8734 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
8735 int32_t thumbnail_size[2];
8736 thumbnail_size[0] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
8737 thumbnail_size[1] = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
8738 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
8739 int32_t orientation =
8740 frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07008741 if ((!needJpegExifRotation()) && ((orientation == 90) || (orientation == 270))) {
Thierry Strudel3d639192016-09-09 11:52:26 -07008742 //swap thumbnail dimensions for rotations 90 and 270 in jpeg metadata.
8743 int32_t temp;
8744 temp = thumbnail_size[0];
8745 thumbnail_size[0] = thumbnail_size[1];
8746 thumbnail_size[1] = temp;
8747 }
8748 }
8749 jpegMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE,
8750 thumbnail_size,
8751 frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
8752 }
8753
8754}
8755
8756/*===========================================================================
8757 * FUNCTION : convertToRegions
8758 *
8759 * DESCRIPTION: helper method to convert from cam_rect_t into int32_t array
8760 *
8761 * PARAMETERS :
8762 * @rect : cam_rect_t struct to convert
8763 * @region : int32_t destination array
8764 * @weight : if we are converting from cam_area_t, weight is valid
8765 * else weight = -1
8766 *
8767 *==========================================================================*/
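// Example: rect {left=100, top=200, width=50, height=60} with weight 1 fills
// region[FACE_LEFT..FACE_WEIGHT] as [100, 200, 150, 260, 1]
// (left, top, right, bottom, weight).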
8768void QCamera3HardwareInterface::convertToRegions(cam_rect_t rect,
8769 int32_t *region, int weight)
8770{
Jason Lee8ce36fa2017-04-19 19:40:37 -07008771 region[FACE_LEFT] = rect.left;
8772 region[FACE_TOP] = rect.top;
8773 region[FACE_RIGHT] = rect.left + rect.width;
8774 region[FACE_BOTTOM] = rect.top + rect.height;
Thierry Strudel3d639192016-09-09 11:52:26 -07008775 if (weight > -1) {
Jason Lee8ce36fa2017-04-19 19:40:37 -07008776 region[FACE_WEIGHT] = weight;
Thierry Strudel3d639192016-09-09 11:52:26 -07008777 }
8778}
8779
8780/*===========================================================================
8781 * FUNCTION : convertFromRegions
8782 *
8783 * DESCRIPTION: helper method to convert a framework region array into cam_area_t
8784 *
8785 * PARAMETERS :
8786 *   @roi            : cam_area_t destination to fill
8787 *   @frame_settings : framework metadata containing the region entry
8788 *   @tag            : metadata tag of the region to convert
8789 *                     (data layout: left, top, right, bottom, weight)
8790 *
8791 *==========================================================================*/
8792void QCamera3HardwareInterface::convertFromRegions(cam_area_t &roi,
Chien-Yu Chen92724a82017-01-06 11:50:30 -08008793 const CameraMetadata &frame_settings, uint32_t tag)
Thierry Strudel3d639192016-09-09 11:52:26 -07008794{
Thierry Strudel3d639192016-09-09 11:52:26 -07008795 int32_t x_min = frame_settings.find(tag).data.i32[0];
8796 int32_t y_min = frame_settings.find(tag).data.i32[1];
8797 int32_t x_max = frame_settings.find(tag).data.i32[2];
8798 int32_t y_max = frame_settings.find(tag).data.i32[3];
8799 roi.weight = frame_settings.find(tag).data.i32[4];
8800 roi.rect.left = x_min;
8801 roi.rect.top = y_min;
8802 roi.rect.width = x_max - x_min;
8803 roi.rect.height = y_max - y_min;
8804}
8805
8806/*===========================================================================
8807 * FUNCTION : resetIfNeededROI
8808 *
8809 * DESCRIPTION: helper method to reset the roi if it is greater than scaler
8810 * crop region
8811 *
8812 * PARAMETERS :
8813 * @roi : cam_area_t struct to resize
8814 * @scalerCropRegion : cam_crop_region_t region to compare against
8815 *
8816 *
8817 *==========================================================================*/
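// Example: with a scaler crop of {500, 375, 3000, 2250}, an ROI of
// {0, 0, 4000, 3000} is clipped to {500, 375, 3000, 2250}; an ROI with
// non-zero weight that lies entirely outside the crop returns false so the
// caller can reset it.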
8818bool QCamera3HardwareInterface::resetIfNeededROI(cam_area_t* roi,
8819 const cam_crop_region_t* scalerCropRegion)
8820{
8821 int32_t roi_x_max = roi->rect.width + roi->rect.left;
8822 int32_t roi_y_max = roi->rect.height + roi->rect.top;
8823 int32_t crop_x_max = scalerCropRegion->width + scalerCropRegion->left;
8824 int32_t crop_y_max = scalerCropRegion->height + scalerCropRegion->top;
8825
8826    /* According to the spec, weight = 0 indicates the roi should be disabled.
8827     * Without this check, the validation below (whether the roi lies inside the
8828     * scaler crop region) would fail, the roi would not be reset, and the
8829     * algorithm would continue to use a stale roi window.
8830 */
8831 if (roi->weight == 0) {
8832 return true;
8833 }
8834
8835 if ((roi_x_max < scalerCropRegion->left) ||
8836        // right edge of roi window is left of scaler crop's left edge
8837        (roi_y_max < scalerCropRegion->top) ||
8838        // bottom edge of roi window is above scaler crop's top edge
8839        (roi->rect.left > crop_x_max) ||
8840        // left edge of roi window is beyond (right of) scaler crop's right edge
8841        (roi->rect.top > crop_y_max)){
8842        // top edge of roi window is below scaler crop's bottom edge
8843 return false;
8844 }
8845 if (roi->rect.left < scalerCropRegion->left) {
8846 roi->rect.left = scalerCropRegion->left;
8847 }
8848 if (roi->rect.top < scalerCropRegion->top) {
8849 roi->rect.top = scalerCropRegion->top;
8850 }
8851 if (roi_x_max > crop_x_max) {
8852 roi_x_max = crop_x_max;
8853 }
8854 if (roi_y_max > crop_y_max) {
8855 roi_y_max = crop_y_max;
8856 }
8857 roi->rect.width = roi_x_max - roi->rect.left;
8858 roi->rect.height = roi_y_max - roi->rect.top;
8859 return true;
8860}
8861
8862/*===========================================================================
8863 * FUNCTION : convertLandmarks
8864 *
8865 * DESCRIPTION: helper method to extract the landmarks from face detection info
8866 *
8867 * PARAMETERS :
8868 * @landmark_data : input landmark data to be converted
8869 * @landmarks : int32_t destination array
8870 *
8871 *
8872 *==========================================================================*/
8873void QCamera3HardwareInterface::convertLandmarks(
8874 cam_face_landmarks_info_t landmark_data,
8875 int32_t *landmarks)
8876{
Thierry Strudel04e026f2016-10-10 11:27:36 -07008877 if (landmark_data.is_left_eye_valid) {
8878 landmarks[LEFT_EYE_X] = (int32_t)landmark_data.left_eye_center.x;
8879 landmarks[LEFT_EYE_Y] = (int32_t)landmark_data.left_eye_center.y;
8880 } else {
8881 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8882 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8883 }
8884
8885 if (landmark_data.is_right_eye_valid) {
8886 landmarks[RIGHT_EYE_X] = (int32_t)landmark_data.right_eye_center.x;
8887 landmarks[RIGHT_EYE_Y] = (int32_t)landmark_data.right_eye_center.y;
8888 } else {
8889 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8890 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8891 }
8892
8893 if (landmark_data.is_mouth_valid) {
8894 landmarks[MOUTH_X] = (int32_t)landmark_data.mouth_center.x;
8895 landmarks[MOUTH_Y] = (int32_t)landmark_data.mouth_center.y;
8896 } else {
8897 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8898 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
8899 }
8900}
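
/* Usage sketch (illustration only; "fd_info" and its per-face landmark field are
 * hypothetical names). The destination array is indexed by the LEFT_EYE_X/Y,
 * RIGHT_EYE_X/Y and MOUTH_X/Y constants, so callers must provide room for all
 * six entries:
 *
 *     int32_t landmarks[6];
 *     convertLandmarks(fd_info.landmarks[i], landmarks);
 *     // Valid landmarks now hold the coordinates reported by the backend;
 *     // missing landmarks are set to FACE_INVALID_POINT.
 */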
8901
8902/*===========================================================================
8903 * FUNCTION : setInvalidLandmarks
8904 *
8905 * DESCRIPTION: helper method to set invalid landmarks
8906 *
8907 * PARAMETERS :
8908 * @landmarks : int32_t destination array
8909 *
8910 *
8911 *==========================================================================*/
8912void QCamera3HardwareInterface::setInvalidLandmarks(
8913 int32_t *landmarks)
8914{
8915 landmarks[LEFT_EYE_X] = FACE_INVALID_POINT;
8916 landmarks[LEFT_EYE_Y] = FACE_INVALID_POINT;
8917 landmarks[RIGHT_EYE_X] = FACE_INVALID_POINT;
8918 landmarks[RIGHT_EYE_Y] = FACE_INVALID_POINT;
8919 landmarks[MOUTH_X] = FACE_INVALID_POINT;
8920 landmarks[MOUTH_Y] = FACE_INVALID_POINT;
Thierry Strudel3d639192016-09-09 11:52:26 -07008921}
8922
8923#define DATA_PTR(MEM_OBJ,INDEX) MEM_OBJ->getPtr( INDEX )
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07008924
8925/*===========================================================================
8926 * FUNCTION : getCapabilities
8927 *
8928 * DESCRIPTION: query camera capability from back-end
8929 *
8930 * PARAMETERS :
8931 * @ops : mm-interface ops structure
8932 * @cam_handle : camera handle for which we need capability
8933 *
8934 * RETURN : ptr type of capability structure
8935 * capability for success
8936 * NULL for failure
8937 *==========================================================================*/
8938cam_capability_t *QCamera3HardwareInterface::getCapabilities(mm_camera_ops_t *ops,
8939 uint32_t cam_handle)
8940{
8941 int rc = NO_ERROR;
8942 QCamera3HeapMemory *capabilityHeap = NULL;
8943 cam_capability_t *cap_ptr = NULL;
8944
8945 if (ops == NULL) {
8946 LOGE("Invalid arguments");
8947 return NULL;
8948 }
8949
8950 capabilityHeap = new QCamera3HeapMemory(1);
8951 if (capabilityHeap == NULL) {
8952 LOGE("creation of capabilityHeap failed");
8953 return NULL;
8954 }
8955
8956 /* Allocate memory for capability buffer */
8957 rc = capabilityHeap->allocate(sizeof(cam_capability_t));
8958 if(rc != OK) {
8959        LOGE("No memory for capability");
8960 goto allocate_failed;
8961 }
8962
8963    /* Zero out and map memory for capability buffer */
8964 memset(DATA_PTR(capabilityHeap,0), 0, sizeof(cam_capability_t));
8965
8966 rc = ops->map_buf(cam_handle,
8967 CAM_MAPPING_BUF_TYPE_CAPABILITY, capabilityHeap->getFd(0),
8968 sizeof(cam_capability_t), capabilityHeap->getPtr(0));
8969 if(rc < 0) {
8970 LOGE("failed to map capability buffer");
8971 rc = FAILED_TRANSACTION;
8972 goto map_failed;
8973 }
8974
8975 /* Query Capability */
8976 rc = ops->query_capability(cam_handle);
8977 if(rc < 0) {
8978 LOGE("failed to query capability");
8979 rc = FAILED_TRANSACTION;
8980 goto query_failed;
8981 }
8982
8983 cap_ptr = (cam_capability_t *)malloc(sizeof(cam_capability_t));
8984 if (cap_ptr == NULL) {
8985 LOGE("out of memory");
8986 rc = NO_MEMORY;
8987 goto query_failed;
8988 }
8989
8990 memset(cap_ptr, 0, sizeof(cam_capability_t));
8991 memcpy(cap_ptr, DATA_PTR(capabilityHeap, 0), sizeof(cam_capability_t));
8992
8993 int index;
8994 for (index = 0; index < CAM_ANALYSIS_INFO_MAX; index++) {
8995 cam_analysis_info_t *p_analysis_info = &cap_ptr->analysis_info[index];
8996 p_analysis_info->analysis_padding_info.offset_info.offset_x = 0;
8997 p_analysis_info->analysis_padding_info.offset_info.offset_y = 0;
8998 }
8999
9000query_failed:
9001 ops->unmap_buf(cam_handle, CAM_MAPPING_BUF_TYPE_CAPABILITY);
9002map_failed:
9003 capabilityHeap->deallocate();
9004allocate_failed:
9005 delete capabilityHeap;
9006
9007 if (rc != NO_ERROR) {
9008 return NULL;
9009 } else {
9010 return cap_ptr;
9011 }
9012}
9013
Thierry Strudel3d639192016-09-09 11:52:26 -07009014/*===========================================================================
9015 * FUNCTION : initCapabilities
9016 *
9017 * DESCRIPTION: initialize camera capabilities in static data struct
9018 *
9019 * PARAMETERS :
9020 * @cameraId : camera Id
9021 *
9022 * RETURN : int32_t type of status
9023 * NO_ERROR -- success
9024 *              non-zero failure code
9025 *==========================================================================*/
9026int QCamera3HardwareInterface::initCapabilities(uint32_t cameraId)
9027{
9028 int rc = 0;
9029 mm_camera_vtbl_t *cameraHandle = NULL;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009030 uint32_t handle = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -07009031
9032 rc = camera_open((uint8_t)cameraId, &cameraHandle);
9033 if (rc) {
9034 LOGE("camera_open failed. rc = %d", rc);
9035 goto open_failed;
9036 }
9037 if (!cameraHandle) {
9038 LOGE("camera_open failed. cameraHandle = %p", cameraHandle);
9039 goto open_failed;
9040 }
9041
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009042 handle = get_main_camera_handle(cameraHandle->camera_handle);
9043 gCamCapability[cameraId] = getCapabilities(cameraHandle->ops, handle);
9044 if (gCamCapability[cameraId] == NULL) {
9045 rc = FAILED_TRANSACTION;
9046 goto failed_op;
Thierry Strudel3d639192016-09-09 11:52:26 -07009047 }
9048
Thierry Strudel295a0ca2016-11-03 18:38:47 -07009049 gCamCapability[cameraId]->camera_index = cameraId;
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009050 if (is_dual_camera_by_idx(cameraId)) {
9051 handle = get_aux_camera_handle(cameraHandle->camera_handle);
9052 gCamCapability[cameraId]->aux_cam_cap =
9053 getCapabilities(cameraHandle->ops, handle);
9054 if (gCamCapability[cameraId]->aux_cam_cap == NULL) {
9055 rc = FAILED_TRANSACTION;
9056 free(gCamCapability[cameraId]);
9057 goto failed_op;
9058 }
Thierry Strudelc2ee3302016-11-17 12:33:12 -08009059
9060 // Copy the main camera capability to main_cam_cap struct
9061 gCamCapability[cameraId]->main_cam_cap =
9062 (cam_capability_t *)malloc(sizeof(cam_capability_t));
9063 if (gCamCapability[cameraId]->main_cam_cap == NULL) {
9064 LOGE("out of memory");
9065 rc = NO_MEMORY;
9066 goto failed_op;
9067 }
9068 memcpy(gCamCapability[cameraId]->main_cam_cap, gCamCapability[cameraId],
9069 sizeof(cam_capability_t));
Thierry Strudel3d639192016-09-09 11:52:26 -07009070 }
Thierry Strudelcca4d9c2016-10-20 08:25:53 -07009071failed_op:
Thierry Strudel3d639192016-09-09 11:52:26 -07009072 cameraHandle->ops->close_camera(cameraHandle->camera_handle);
9073 cameraHandle = NULL;
9074open_failed:
9075 return rc;
9076}
9077
9078/*==========================================================================
9079 * FUNCTION   : get3AVersion
9080 *
9081 * DESCRIPTION: get the Q3A S/W version
9082 *
9083 * PARAMETERS :
9084 * @sw_version: Reference of Q3A structure which will hold version info upon
9085 * return
9086 *
9087 * RETURN : None
9088 *
9089 *==========================================================================*/
9090void QCamera3HardwareInterface::get3AVersion(cam_q3a_version_t &sw_version)
9091{
9092 if(gCamCapability[mCameraId])
9093 sw_version = gCamCapability[mCameraId]->q3a_version;
9094 else
9095 LOGE("Capability structure NULL!");
9096}
9097
9098
9099/*===========================================================================
9100 * FUNCTION : initParameters
9101 *
9102 * DESCRIPTION: initialize camera parameters
9103 *
9104 * PARAMETERS :
9105 *
9106 * RETURN : int32_t type of status
9107 * NO_ERROR -- success
9108 *              non-zero failure code
9109 *==========================================================================*/
9110int QCamera3HardwareInterface::initParameters()
9111{
9112 int rc = 0;
9113
9114 //Allocate Set Param Buffer
9115 mParamHeap = new QCamera3HeapMemory(1);
9116 rc = mParamHeap->allocate(sizeof(metadata_buffer_t));
9117 if(rc != OK) {
9118 rc = NO_MEMORY;
9119 LOGE("Failed to allocate SETPARM Heap memory");
9120 delete mParamHeap;
9121 mParamHeap = NULL;
9122 return rc;
9123 }
9124
9125 //Map memory for parameters buffer
9126 rc = mCameraHandle->ops->map_buf(mCameraHandle->camera_handle,
9127 CAM_MAPPING_BUF_TYPE_PARM_BUF,
9128 mParamHeap->getFd(0),
9129 sizeof(metadata_buffer_t),
9130 (metadata_buffer_t *) DATA_PTR(mParamHeap,0));
9131 if(rc < 0) {
9132 LOGE("failed to map SETPARM buffer");
9133 rc = FAILED_TRANSACTION;
9134 mParamHeap->deallocate();
9135 delete mParamHeap;
9136 mParamHeap = NULL;
9137 return rc;
9138 }
9139
9140 mParameters = (metadata_buffer_t *) DATA_PTR(mParamHeap,0);
9141
9142 mPrevParameters = (metadata_buffer_t *)malloc(sizeof(metadata_buffer_t));
9143 return rc;
9144}
9145
9146/*===========================================================================
9147 * FUNCTION : deinitParameters
9148 *
9149 * DESCRIPTION: de-initialize camera parameters
9150 *
9151 * PARAMETERS :
9152 *
9153 * RETURN : NONE
9154 *==========================================================================*/
9155void QCamera3HardwareInterface::deinitParameters()
9156{
9157 mCameraHandle->ops->unmap_buf(mCameraHandle->camera_handle,
9158 CAM_MAPPING_BUF_TYPE_PARM_BUF);
9159
9160 mParamHeap->deallocate();
9161 delete mParamHeap;
9162 mParamHeap = NULL;
9163
9164 mParameters = NULL;
9165
9166 free(mPrevParameters);
9167 mPrevParameters = NULL;
9168}
9169
9170/*===========================================================================
9171 * FUNCTION : calcMaxJpegSize
9172 *
9173 * DESCRIPTION: Calculates maximum jpeg size supported by the cameraId
9174 *
9175 * PARAMETERS :
9176 *   @camera_id : camera Id
9177 * RETURN : max_jpeg_size
9178 *==========================================================================*/
9179size_t QCamera3HardwareInterface::calcMaxJpegSize(uint32_t camera_id)
9180{
9181 size_t max_jpeg_size = 0;
9182 size_t temp_width, temp_height;
9183 size_t count = MIN(gCamCapability[camera_id]->picture_sizes_tbl_cnt,
9184 MAX_SIZES_CNT);
9185 for (size_t i = 0; i < count; i++) {
9186 temp_width = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].width;
9187 temp_height = (size_t)gCamCapability[camera_id]->picture_sizes_tbl[i].height;
9188 if (temp_width * temp_height > max_jpeg_size ) {
9189 max_jpeg_size = temp_width * temp_height;
9190 }
9191 }
9192 max_jpeg_size = max_jpeg_size * 3/2 + sizeof(camera3_jpeg_blob_t);
9193 return max_jpeg_size;
9194}
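
/* Worked example (hypothetical sensor, for illustration only): for a largest
 * picture size of 4000x3000, the reserved JPEG buffer size would be
 *     4000 * 3000 * 3 / 2 + sizeof(camera3_jpeg_blob_t)
 *   = 18,000,000 bytes plus the blob structure,
 * i.e. the worst-case YUV420 footprint of the largest frame plus the
 * camera3_jpeg_blob_t trailer the framework expects at the end of the buffer.
 */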
9195
9196/*===========================================================================
9197 * FUNCTION : getMaxRawSize
9198 *
9199 * DESCRIPTION: Fetches maximum raw size supported by the cameraId
9200 *
9201 * PARAMETERS :
9202 *   @camera_id : camera Id
9203 * RETURN : Largest supported Raw Dimension
9204 *==========================================================================*/
9205cam_dimension_t QCamera3HardwareInterface::getMaxRawSize(uint32_t camera_id)
9206{
9207 int max_width = 0;
9208 cam_dimension_t maxRawSize;
9209
9210 memset(&maxRawSize, 0, sizeof(cam_dimension_t));
9211 for (size_t i = 0; i < gCamCapability[camera_id]->supported_raw_dim_cnt; i++) {
9212 if (max_width < gCamCapability[camera_id]->raw_dim[i].width) {
9213 max_width = gCamCapability[camera_id]->raw_dim[i].width;
9214 maxRawSize = gCamCapability[camera_id]->raw_dim[i];
9215 }
9216 }
9217 return maxRawSize;
9218}
9219
9220
9221/*===========================================================================
9222 * FUNCTION : calcMaxJpegDim
9223 *
9224 * DESCRIPTION: Calculates maximum jpeg dimension supported by the cameraId
9225 *
9226 * PARAMETERS :
9227 *
9228 * RETURN : max_jpeg_dim
9229 *==========================================================================*/
9230cam_dimension_t QCamera3HardwareInterface::calcMaxJpegDim()
9231{
9232 cam_dimension_t max_jpeg_dim;
9233 cam_dimension_t curr_jpeg_dim;
9234 max_jpeg_dim.width = 0;
9235 max_jpeg_dim.height = 0;
9236 curr_jpeg_dim.width = 0;
9237 curr_jpeg_dim.height = 0;
9238 for (size_t i = 0; i < gCamCapability[mCameraId]->picture_sizes_tbl_cnt; i++) {
9239 curr_jpeg_dim.width = gCamCapability[mCameraId]->picture_sizes_tbl[i].width;
9240 curr_jpeg_dim.height = gCamCapability[mCameraId]->picture_sizes_tbl[i].height;
9241 if (curr_jpeg_dim.width * curr_jpeg_dim.height >
9242 max_jpeg_dim.width * max_jpeg_dim.height ) {
9243 max_jpeg_dim.width = curr_jpeg_dim.width;
9244 max_jpeg_dim.height = curr_jpeg_dim.height;
9245 }
9246 }
9247 return max_jpeg_dim;
9248}
9249
9250/*===========================================================================
9251 * FUNCTION : addStreamConfig
9252 *
9253 * DESCRIPTION: adds the stream configuration to the array
9254 *
9255 * PARAMETERS :
9256 * @available_stream_configs : pointer to stream configuration array
9257 * @scalar_format : scalar format
9258 * @dim : configuration dimension
9259 * @config_type : input or output configuration type
9260 *
9261 * RETURN : NONE
9262 *==========================================================================*/
9263void QCamera3HardwareInterface::addStreamConfig(Vector<int32_t> &available_stream_configs,
9264 int32_t scalar_format, const cam_dimension_t &dim, int32_t config_type)
9265{
9266 available_stream_configs.add(scalar_format);
9267 available_stream_configs.add(dim.width);
9268 available_stream_configs.add(dim.height);
9269 available_stream_configs.add(config_type);
9270}
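
/* Usage note (illustration only; the 1920x1080 entry is hypothetical): each call
 * appends one flattened (format, width, height, direction) quadruple, which is
 * how ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS entries are encoded:
 *
 *     addStreamConfig(available_stream_configs,
 *             ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
 *             {1920, 1080},
 *             ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
 *     // appends: format, 1920, 1080, OUTPUT
 */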
9271
9272/*===========================================================================
9273 * FUNCTION   : supportBurstCapture
9274 *
9275 * DESCRIPTION: Whether a particular camera supports BURST_CAPTURE
9276 *
9277 * PARAMETERS :
9278 * @cameraId : camera Id
9279 *
9280 * RETURN : true if camera supports BURST_CAPTURE
9281 * false otherwise
9282 *==========================================================================*/
9283bool QCamera3HardwareInterface::supportBurstCapture(uint32_t cameraId)
9284{
9285 const int64_t highResDurationBound = 50000000; // 50 ms, 20 fps
9286 const int64_t fullResDurationBound = 100000000; // 100 ms, 10 fps
9287 const int32_t highResWidth = 3264;
9288 const int32_t highResHeight = 2448;
9289
9290 if (gCamCapability[cameraId]->picture_min_duration[0] > fullResDurationBound) {
9291 // Maximum resolution images cannot be captured at >= 10fps
9292 // -> not supporting BURST_CAPTURE
9293 return false;
9294 }
9295
9296 if (gCamCapability[cameraId]->picture_min_duration[0] <= highResDurationBound) {
9297 // Maximum resolution images can be captured at >= 20fps
9298 // --> supporting BURST_CAPTURE
9299 return true;
9300 }
9301
9302 // Find the smallest highRes resolution, or largest resolution if there is none
9303 size_t totalCnt = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt,
9304 MAX_SIZES_CNT);
9305 size_t highRes = 0;
9306 while ((highRes + 1 < totalCnt) &&
9307 (gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].width *
9308 gCamCapability[cameraId]->picture_sizes_tbl[highRes+1].height >=
9309 highResWidth * highResHeight)) {
9310 highRes++;
9311 }
9312 if (gCamCapability[cameraId]->picture_min_duration[highRes] <= highResDurationBound) {
9313 return true;
9314 } else {
9315 return false;
9316 }
9317}
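
/* Worked example (hypothetical timings): if picture_min_duration[0] is 60 ms
 * (~16.7 fps), neither early return triggers (not > 100 ms, not <= 50 ms), so the
 * loop locates the smallest size that is still >= 3264x2448, and BURST_CAPTURE is
 * advertised only if that size can be captured at >= 20 fps (min duration <= 50 ms).
 */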
9318
9319/*===========================================================================
Emilian Peev0f3c3162017-03-15 12:57:46 +00009320 * FUNCTION : getPDStatIndex
9321 *
9322 * DESCRIPTION: Return the meta raw phase detection statistics index if present
9323 *
9324 * PARAMETERS :
9325 * @caps : camera capabilities
9326 *
9327 * RETURN : int32_t type
9328 * non-negative - on success
9329 * -1 - on failure
9330 *==========================================================================*/
9331int32_t QCamera3HardwareInterface::getPDStatIndex(cam_capability_t *caps) {
9332 if (nullptr == caps) {
9333 return -1;
9334 }
9335
9336 uint32_t metaRawCount = caps->meta_raw_channel_count;
9337 int32_t ret = -1;
9338 for (size_t i = 0; i < metaRawCount; i++) {
9339 if (CAM_FORMAT_SUBTYPE_PDAF_STATS == caps->sub_fmt[i]) {
9340 ret = i;
9341 break;
9342 }
9343 }
9344
9345 return ret;
9346}
9347
9348/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -07009349 * FUNCTION : initStaticMetadata
9350 *
9351 * DESCRIPTION: initialize the static metadata
9352 *
9353 * PARAMETERS :
9354 * @cameraId : camera Id
9355 *
9356 * RETURN : int32_t type of status
9357 * 0 -- success
9358 * non-zero failure code
9359 *==========================================================================*/
9360int QCamera3HardwareInterface::initStaticMetadata(uint32_t cameraId)
9361{
9362 int rc = 0;
9363 CameraMetadata staticInfo;
9364 size_t count = 0;
9365 bool limitedDevice = false;
9366 char prop[PROPERTY_VALUE_MAX];
9367 bool supportBurst = false;
9368
9369 supportBurst = supportBurstCapture(cameraId);
9370
9371 /* If sensor is YUV sensor (no raw support) or if per-frame control is not
9372     * guaranteed or if min fps of max resolution is less than 20 fps, it is
9373     * advertised as a limited device */
9374 limitedDevice = gCamCapability[cameraId]->no_per_frame_control_support ||
9375 (CAM_SENSOR_YUV == gCamCapability[cameraId]->sensor_type.sens_type) ||
9376 (CAM_SENSOR_MONO == gCamCapability[cameraId]->sensor_type.sens_type) ||
9377 !supportBurst;
9378
9379 uint8_t supportedHwLvl = limitedDevice ?
9380 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_LIMITED :
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009381#ifndef USE_HAL_3_3
9382 // LEVEL_3 - This device will support level 3.
9383 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_3;
9384#else
Thierry Strudel3d639192016-09-09 11:52:26 -07009385 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL_FULL;
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009386#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009387
9388 staticInfo.update(ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
9389 &supportedHwLvl, 1);
9390
9391 bool facingBack = false;
9392 if ((gCamCapability[cameraId]->position == CAM_POSITION_BACK) ||
9393 (gCamCapability[cameraId]->position == CAM_POSITION_BACK_AUX)) {
9394 facingBack = true;
9395 }
9396 /*HAL 3 only*/
9397 staticInfo.update(ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
9398 &gCamCapability[cameraId]->min_focus_distance, 1);
9399
9400 staticInfo.update(ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE,
9401 &gCamCapability[cameraId]->hyper_focal_distance, 1);
9402
9403 /*should be using focal lengths but sensor doesn't provide that info now*/
9404 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
9405 &gCamCapability[cameraId]->focal_length,
9406 1);
9407
9408 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_APERTURES,
9409 gCamCapability[cameraId]->apertures,
9410 MIN(CAM_APERTURES_MAX, gCamCapability[cameraId]->apertures_count));
9411
9412 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
9413 gCamCapability[cameraId]->filter_densities,
9414 MIN(CAM_FILTER_DENSITIES_MAX, gCamCapability[cameraId]->filter_densities_count));
9415
9416
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009417 uint8_t available_opt_stab_modes[CAM_OPT_STAB_MAX];
9418 size_t mode_count =
9419 MIN((size_t)CAM_OPT_STAB_MAX, gCamCapability[cameraId]->optical_stab_modes_count);
9420 for (size_t i = 0; i < mode_count; i++) {
9421 available_opt_stab_modes[i] = gCamCapability[cameraId]->optical_stab_modes[i];
9422 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009423 staticInfo.update(ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
Zhijun Hea6ea1d32017-03-10 13:30:00 -08009424 available_opt_stab_modes, mode_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009425
9426 int32_t lens_shading_map_size[] = {
9427 MIN(CAM_MAX_SHADING_MAP_WIDTH, gCamCapability[cameraId]->lens_shading_map_size.width),
9428 MIN(CAM_MAX_SHADING_MAP_HEIGHT, gCamCapability[cameraId]->lens_shading_map_size.height)};
9429 staticInfo.update(ANDROID_LENS_INFO_SHADING_MAP_SIZE,
9430 lens_shading_map_size,
9431 sizeof(lens_shading_map_size)/sizeof(int32_t));
9432
9433 staticInfo.update(ANDROID_SENSOR_INFO_PHYSICAL_SIZE,
9434 gCamCapability[cameraId]->sensor_physical_size, SENSOR_PHYSICAL_SIZE_CNT);
9435
9436 staticInfo.update(ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE,
9437 gCamCapability[cameraId]->exposure_time_range, EXPOSURE_TIME_RANGE_CNT);
9438
9439 staticInfo.update(ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
9440 &gCamCapability[cameraId]->max_frame_duration, 1);
9441
9442 camera_metadata_rational baseGainFactor = {
9443 gCamCapability[cameraId]->base_gain_factor.numerator,
9444 gCamCapability[cameraId]->base_gain_factor.denominator};
9445 staticInfo.update(ANDROID_SENSOR_BASE_GAIN_FACTOR,
9446 &baseGainFactor, 1);
9447
9448 staticInfo.update(ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
9449 (uint8_t *)&gCamCapability[cameraId]->color_arrangement, 1);
9450
9451 int32_t pixel_array_size[] = {gCamCapability[cameraId]->pixel_array_size.width,
9452 gCamCapability[cameraId]->pixel_array_size.height};
9453 staticInfo.update(ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
9454 pixel_array_size, sizeof(pixel_array_size)/sizeof(pixel_array_size[0]));
9455
9456 int32_t active_array_size[] = {gCamCapability[cameraId]->active_array_size.left,
9457 gCamCapability[cameraId]->active_array_size.top,
9458 gCamCapability[cameraId]->active_array_size.width,
9459 gCamCapability[cameraId]->active_array_size.height};
9460 staticInfo.update(ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
9461 active_array_size, sizeof(active_array_size)/sizeof(active_array_size[0]));
9462
9463 staticInfo.update(ANDROID_SENSOR_INFO_WHITE_LEVEL,
9464 &gCamCapability[cameraId]->white_level, 1);
9465
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009466 int32_t adjusted_bl_per_cfa[BLACK_LEVEL_PATTERN_CNT];
9467 adjustBlackLevelForCFA(gCamCapability[cameraId]->black_level_pattern, adjusted_bl_per_cfa,
9468 gCamCapability[cameraId]->color_arrangement);
Thierry Strudel3d639192016-09-09 11:52:26 -07009469 staticInfo.update(ANDROID_SENSOR_BLACK_LEVEL_PATTERN,
Shuzhen Wanga5da1022016-07-13 20:18:42 -07009470 adjusted_bl_per_cfa, BLACK_LEVEL_PATTERN_CNT);
Thierry Strudel3d639192016-09-09 11:52:26 -07009471
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009472#ifndef USE_HAL_3_3
9473 bool hasBlackRegions = false;
9474 if (gCamCapability[cameraId]->optical_black_region_count > MAX_OPTICAL_BLACK_REGIONS) {
9475 LOGW("black_region_count: %d is bounded to %d",
9476 gCamCapability[cameraId]->optical_black_region_count, MAX_OPTICAL_BLACK_REGIONS);
9477 gCamCapability[cameraId]->optical_black_region_count = MAX_OPTICAL_BLACK_REGIONS;
9478 }
9479 if (gCamCapability[cameraId]->optical_black_region_count != 0) {
9480 int32_t opticalBlackRegions[MAX_OPTICAL_BLACK_REGIONS * 4];
9481 for (size_t i = 0; i < gCamCapability[cameraId]->optical_black_region_count * 4; i++) {
9482 opticalBlackRegions[i] = gCamCapability[cameraId]->optical_black_regions[i];
9483 }
9484 staticInfo.update(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS,
9485 opticalBlackRegions, gCamCapability[cameraId]->optical_black_region_count * 4);
9486 hasBlackRegions = true;
9487 }
9488#endif
Thierry Strudel3d639192016-09-09 11:52:26 -07009489 staticInfo.update(ANDROID_FLASH_INFO_CHARGE_DURATION,
9490 &gCamCapability[cameraId]->flash_charge_duration, 1);
9491
9492 staticInfo.update(ANDROID_TONEMAP_MAX_CURVE_POINTS,
9493 &gCamCapability[cameraId]->max_tone_map_curve_points, 1);
9494
Shuzhen Wang98d5efb2016-09-07 18:08:22 -07009495 uint8_t timestampSource = (gCamCapability[cameraId]->timestamp_calibrated ?
9496 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_REALTIME :
9497 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE_UNKNOWN);
Thierry Strudel3d639192016-09-09 11:52:26 -07009498 staticInfo.update(ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
9499 &timestampSource, 1);
9500
Thierry Strudel54dc9782017-02-15 12:12:10 -08009501 //update histogram vendor data
9502 staticInfo.update(QCAMERA3_HISTOGRAM_BUCKETS,
Thierry Strudel3d639192016-09-09 11:52:26 -07009503 &gCamCapability[cameraId]->histogram_size, 1);
9504
Thierry Strudel54dc9782017-02-15 12:12:10 -08009505 staticInfo.update(QCAMERA3_HISTOGRAM_MAX_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -07009506 &gCamCapability[cameraId]->max_histogram_count, 1);
9507
Shuzhen Wang14415f52016-11-16 18:26:18 -08009508 //Set supported bins to be {max_bins, max_bins/2, max_bins/4, ...}
9509    //so that the app can request fewer bins than the maximum supported.
9510 std::vector<int32_t> histBins;
9511 int32_t maxHistBins = gCamCapability[cameraId]->max_histogram_count;
9512 histBins.push_back(maxHistBins);
9513 while ((maxHistBins >> 1) >= MIN_CAM_HISTOGRAM_STATS_SIZE &&
9514 (maxHistBins & 0x1) == 0) {
9515 histBins.push_back(maxHistBins >> 1);
9516 maxHistBins >>= 1;
9517 }
9518 staticInfo.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_SUPPORTED_BINS,
9519 histBins.data(), histBins.size());
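    // Worked example (hypothetical values): with max_histogram_count = 256 and
    // MIN_CAM_HISTOGRAM_STATS_SIZE = 64, the advertised list is {256, 128, 64};
    // halving continues only while the result stays >= the minimum and the
    // current count is even.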
9520
Thierry Strudel3d639192016-09-09 11:52:26 -07009521 int32_t sharpness_map_size[] = {
9522 gCamCapability[cameraId]->sharpness_map_size.width,
9523 gCamCapability[cameraId]->sharpness_map_size.height};
9524
9525 staticInfo.update(ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE,
9526 sharpness_map_size, sizeof(sharpness_map_size)/sizeof(int32_t));
9527
9528 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
9529 &gCamCapability[cameraId]->max_sharpness_map_value, 1);
9530
Emilian Peev0f3c3162017-03-15 12:57:46 +00009531 int32_t indexPD = getPDStatIndex(gCamCapability[cameraId]);
9532 if (0 <= indexPD) {
9533 // Advertise PD stats data as part of the Depth capabilities
9534 int32_t depthWidth =
9535 gCamCapability[cameraId]->raw_meta_dim[indexPD].width;
9536 int32_t depthHeight =
9537 gCamCapability[cameraId]->raw_meta_dim[indexPD].height;
Emilian Peev656e4fa2017-06-02 16:47:04 +01009538 int32_t depthStride =
9539 gCamCapability[cameraId]->raw_meta_dim[indexPD].width * 2;
Emilian Peev0f3c3162017-03-15 12:57:46 +00009540 int32_t depthSamplesCount = (depthWidth * depthHeight * 2) / 16;
9541 assert(0 < depthSamplesCount);
9542 staticInfo.update(ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
9543 &depthSamplesCount, 1);
9544
9545 int32_t depthConfigs[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9546 depthHeight,
9547 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT,
9548 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1,
9549 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT};
9550 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
9551 depthConfigs, sizeof(depthConfigs)/sizeof(depthConfigs[0]));
9552
9553 int64_t depthMinDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9554 depthHeight, 33333333,
9555 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 33333333};
9556 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
9557 depthMinDuration,
9558 sizeof(depthMinDuration) / sizeof(depthMinDuration[0]));
9559
9560 int64_t depthStallDuration[] = {HAL_PIXEL_FORMAT_RAW16, depthWidth,
9561 depthHeight, 0,
9562 HAL_PIXEL_FORMAT_BLOB, depthSamplesCount, 1, 0};
9563 staticInfo.update(ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
9564 depthStallDuration,
9565 sizeof(depthStallDuration) / sizeof(depthStallDuration[0]));
9566
9567 uint8_t depthExclusive = ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE_FALSE;
9568 staticInfo.update(ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE, &depthExclusive, 1);
Emilian Peev656e4fa2017-06-02 16:47:04 +01009569
9570 int32_t pd_dimensions [] = {depthWidth, depthHeight, depthStride};
9571 staticInfo.update(NEXUS_EXPERIMENTAL_2017_PD_DATA_DIMENSIONS,
9572 pd_dimensions, sizeof(pd_dimensions) / sizeof(pd_dimensions[0]));
Emilian Peev835938b2017-08-31 16:59:54 +01009573
9574 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_RIGHT_GAINS,
9575 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.right_gain_map),
9576 sizeof(gCamCapability[cameraId]->pdaf_cal.right_gain_map));
9577
9578 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_LEFT_GAINS,
9579 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.left_gain_map),
9580 sizeof(gCamCapability[cameraId]->pdaf_cal.left_gain_map));
9581
9582 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_PDAF_CALIB_CONV_COEFF,
9583 reinterpret_cast<uint8_t *>(gCamCapability[cameraId]->pdaf_cal.conversion_coeff),
9584 sizeof(gCamCapability[cameraId]->pdaf_cal.conversion_coeff));
Emilian Peev0f3c3162017-03-15 12:57:46 +00009585 }
9586
Thierry Strudel3d639192016-09-09 11:52:26 -07009587 int32_t scalar_formats[] = {
9588 ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE,
9589 ANDROID_SCALER_AVAILABLE_FORMATS_RAW16,
9590 ANDROID_SCALER_AVAILABLE_FORMATS_YCbCr_420_888,
9591 ANDROID_SCALER_AVAILABLE_FORMATS_BLOB,
9592 HAL_PIXEL_FORMAT_RAW10,
9593 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED};
Emilian Peev0f3c3162017-03-15 12:57:46 +00009594 size_t scalar_formats_count = sizeof(scalar_formats) / sizeof(scalar_formats[0]);
9595 staticInfo.update(ANDROID_SCALER_AVAILABLE_FORMATS, scalar_formats,
9596 scalar_formats_count);
Thierry Strudel3d639192016-09-09 11:52:26 -07009597
9598 int32_t available_processed_sizes[MAX_SIZES_CNT * 2];
9599 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9600 makeTable(gCamCapability[cameraId]->picture_sizes_tbl,
9601 count, MAX_SIZES_CNT, available_processed_sizes);
9602 staticInfo.update(ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES,
9603 available_processed_sizes, count * 2);
9604
9605 int32_t available_raw_sizes[MAX_SIZES_CNT * 2];
9606 count = MIN(gCamCapability[cameraId]->supported_raw_dim_cnt, MAX_SIZES_CNT);
9607 makeTable(gCamCapability[cameraId]->raw_dim,
9608 count, MAX_SIZES_CNT, available_raw_sizes);
9609 staticInfo.update(ANDROID_SCALER_AVAILABLE_RAW_SIZES,
9610 available_raw_sizes, count * 2);
9611
9612 int32_t available_fps_ranges[MAX_SIZES_CNT * 2];
9613 count = MIN(gCamCapability[cameraId]->fps_ranges_tbl_cnt, MAX_SIZES_CNT);
9614 makeFPSTable(gCamCapability[cameraId]->fps_ranges_tbl,
9615 count, MAX_SIZES_CNT, available_fps_ranges);
9616 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
9617 available_fps_ranges, count * 2);
9618
9619 camera_metadata_rational exposureCompensationStep = {
9620 gCamCapability[cameraId]->exp_compensation_step.numerator,
9621 gCamCapability[cameraId]->exp_compensation_step.denominator};
9622 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_STEP,
9623 &exposureCompensationStep, 1);
9624
9625 Vector<uint8_t> availableVstabModes;
9626 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF);
9627 char eis_prop[PROPERTY_VALUE_MAX];
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009628 bool eisSupported = false;
Thierry Strudel3d639192016-09-09 11:52:26 -07009629 memset(eis_prop, 0, sizeof(eis_prop));
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009630 property_get("persist.camera.eis.enable", eis_prop, "1");
Thierry Strudel3d639192016-09-09 11:52:26 -07009631 uint8_t eis_prop_set = (uint8_t)atoi(eis_prop);
Thierry Strudel9e74aae2016-09-22 17:10:18 -07009632 count = IS_TYPE_MAX;
9633 count = MIN(gCamCapability[cameraId]->supported_is_types_cnt, count);
9634 for (size_t i = 0; i < count; i++) {
9635 if ((gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_2_0) ||
9636 (gCamCapability[cameraId]->supported_is_types[i] == IS_TYPE_EIS_3_0)) {
9637 eisSupported = true;
9638 break;
9639 }
9640 }
9641 if (facingBack && eis_prop_set && eisSupported) {
Thierry Strudel3d639192016-09-09 11:52:26 -07009642 availableVstabModes.add(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_ON);
9643 }
9644 staticInfo.update(ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
9645 availableVstabModes.array(), availableVstabModes.size());
9646
9647 /*HAL 1 and HAL 3 common*/
9648 uint32_t zoomSteps = gCamCapability[cameraId]->zoom_ratio_tbl_cnt;
9649 uint32_t maxZoomStep = gCamCapability[cameraId]->zoom_ratio_tbl[zoomSteps - 1];
9650 uint32_t minZoomStep = 100; //as per HAL1/API1 spec
Zhijun He2a5df222017-04-04 18:20:38 -07009651 // Cap the max zoom to the max preferred value
9652 float maxZoom = MIN(maxZoomStep/minZoomStep, MAX_PREFERRED_ZOOM_RATIO);
Thierry Strudel3d639192016-09-09 11:52:26 -07009653 staticInfo.update(ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
9654 &maxZoom, 1);
9655
9656 uint8_t croppingType = ANDROID_SCALER_CROPPING_TYPE_CENTER_ONLY;
9657 staticInfo.update(ANDROID_SCALER_CROPPING_TYPE, &croppingType, 1);
9658
9659 int32_t max3aRegions[3] = {/*AE*/1,/*AWB*/ 0,/*AF*/ 1};
9660 if (gCamCapability[cameraId]->supported_focus_modes_cnt == 1)
9661 max3aRegions[2] = 0; /* AF not supported */
9662 staticInfo.update(ANDROID_CONTROL_MAX_REGIONS,
9663 max3aRegions, 3);
9664
9665 /* 0: OFF, 1: OFF+SIMPLE, 2: OFF+FULL, 3: OFF+SIMPLE+FULL */
9666 memset(prop, 0, sizeof(prop));
9667 property_get("persist.camera.facedetect", prop, "1");
9668 uint8_t supportedFaceDetectMode = (uint8_t)atoi(prop);
9669 LOGD("Support face detection mode: %d",
9670 supportedFaceDetectMode);
9671
9672 int32_t maxFaces = gCamCapability[cameraId]->max_num_roi;
Thierry Strudel04e026f2016-10-10 11:27:36 -07009673    /* supported face detect mode should be OFF if the max number of faces is 0 */
9674 if (maxFaces <= 0) {
9675 supportedFaceDetectMode = 0;
9676 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009677 Vector<uint8_t> availableFaceDetectModes;
9678 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_OFF);
9679 if (supportedFaceDetectMode == 1) {
9680 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9681 } else if (supportedFaceDetectMode == 2) {
9682 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9683 } else if (supportedFaceDetectMode == 3) {
9684 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_SIMPLE);
9685 availableFaceDetectModes.add(ANDROID_STATISTICS_FACE_DETECT_MODE_FULL);
9686 } else {
9687 maxFaces = 0;
9688 }
9689 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
9690 availableFaceDetectModes.array(),
9691 availableFaceDetectModes.size());
9692 staticInfo.update(ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
9693 (int32_t *)&maxFaces, 1);
Thierry Strudel54dc9782017-02-15 12:12:10 -08009694 uint8_t face_bsgc = gCamCapability[cameraId]->face_bsgc;
9695 staticInfo.update(QCAMERA3_STATS_BSGC_AVAILABLE,
9696 &face_bsgc, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -07009697
9698 int32_t exposureCompensationRange[] = {
9699 gCamCapability[cameraId]->exposure_compensation_min,
9700 gCamCapability[cameraId]->exposure_compensation_max};
9701 staticInfo.update(ANDROID_CONTROL_AE_COMPENSATION_RANGE,
9702 exposureCompensationRange,
9703 sizeof(exposureCompensationRange)/sizeof(int32_t));
9704
9705 uint8_t lensFacing = (facingBack) ?
9706 ANDROID_LENS_FACING_BACK : ANDROID_LENS_FACING_FRONT;
9707 staticInfo.update(ANDROID_LENS_FACING, &lensFacing, 1);
9708
9709 staticInfo.update(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
9710 available_thumbnail_sizes,
9711 sizeof(available_thumbnail_sizes)/sizeof(int32_t));
9712
9713 /*all sizes will be clubbed into this tag*/
9714 count = MIN(gCamCapability[cameraId]->picture_sizes_tbl_cnt, MAX_SIZES_CNT);
9715 /*android.scaler.availableStreamConfigurations*/
9716 Vector<int32_t> available_stream_configs;
9717 cam_dimension_t active_array_dim;
9718 active_array_dim.width = gCamCapability[cameraId]->active_array_size.width;
9719 active_array_dim.height = gCamCapability[cameraId]->active_array_size.height;
Thierry Strudel2896d122017-02-23 19:18:03 -08009720
9721    /*Advertise the list of supported input dimensions based on the property below.
9722      By default all sizes up to 5MP will be advertised.
9723 Note that the setprop resolution format should be WxH.
9724 e.g: adb shell setprop persist.camera.input.minsize 1280x720
9725 To list all supported sizes, setprop needs to be set with "0x0" */
9726 cam_dimension_t minInputSize = {2592,1944}; //5MP
9727 memset(prop, 0, sizeof(prop));
9728 property_get("persist.camera.input.minsize", prop, "2592x1944");
9729 if (strlen(prop) > 0) {
9730 char *saveptr = NULL;
9731 char *token = strtok_r(prop, "x", &saveptr);
9732 if (token != NULL) {
9733 minInputSize.width = atoi(token);
9734 }
9735 token = strtok_r(NULL, "x", &saveptr);
9736 if (token != NULL) {
9737 minInputSize.height = atoi(token);
9738 }
9739 }
9740
Thierry Strudel3d639192016-09-09 11:52:26 -07009741 /* Add input/output stream configurations for each scalar formats*/
9742 for (size_t j = 0; j < scalar_formats_count; j++) {
9743 switch (scalar_formats[j]) {
9744 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9745 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9746 case HAL_PIXEL_FORMAT_RAW10:
9747 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9748 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9749 addStreamConfig(available_stream_configs, scalar_formats[j],
9750 gCamCapability[cameraId]->raw_dim[i],
9751 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9752 }
9753 break;
9754 case HAL_PIXEL_FORMAT_BLOB:
9755 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9756 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9757 addStreamConfig(available_stream_configs, scalar_formats[j],
9758 gCamCapability[cameraId]->picture_sizes_tbl[i],
9759 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
9760 }
9761 break;
9762 case HAL_PIXEL_FORMAT_YCbCr_420_888:
9763 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
9764 default:
9765 cam_dimension_t largest_picture_size;
9766 memset(&largest_picture_size, 0, sizeof(cam_dimension_t));
9767 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9768 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9769 addStreamConfig(available_stream_configs, scalar_formats[j],
9770 gCamCapability[cameraId]->picture_sizes_tbl[i],
9771 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT);
Thierry Strudel2896d122017-02-23 19:18:03 -08009772            /* For the two formats below we also support input streams for reprocessing; advertise those */
Zhijun Hee0cc0ae2017-05-19 22:19:27 -07009773 if ((scalar_formats[j] == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED ||
9774 scalar_formats[j] == HAL_PIXEL_FORMAT_YCbCr_420_888) && i == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -08009775 if ((gCamCapability[cameraId]->picture_sizes_tbl[i].width
9776 >= minInputSize.width) || (gCamCapability[cameraId]->
9777 picture_sizes_tbl[i].height >= minInputSize.height)) {
9778 addStreamConfig(available_stream_configs, scalar_formats[j],
9779 gCamCapability[cameraId]->picture_sizes_tbl[i],
9780 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_INPUT);
9781 }
9782 }
Thierry Strudel3d639192016-09-09 11:52:26 -07009783 }
Thierry Strudel2896d122017-02-23 19:18:03 -08009784
Thierry Strudel3d639192016-09-09 11:52:26 -07009785 break;
9786 }
9787 }
9788
9789 staticInfo.update(ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
9790 available_stream_configs.array(), available_stream_configs.size());
9791 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
9792 staticInfo.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
9793
9794 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
9795 staticInfo.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
9796
9797 /* android.scaler.availableMinFrameDurations */
9798 Vector<int64_t> available_min_durations;
9799 for (size_t j = 0; j < scalar_formats_count; j++) {
9800 switch (scalar_formats[j]) {
9801 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW16:
9802 case ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE:
9803 case HAL_PIXEL_FORMAT_RAW10:
9804 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9805 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
9806 available_min_durations.add(scalar_formats[j]);
9807 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
9808 available_min_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
9809 available_min_durations.add(gCamCapability[cameraId]->raw_min_duration[i]);
9810 }
9811 break;
9812 default:
9813 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
9814 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
9815 available_min_durations.add(scalar_formats[j]);
9816 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
9817 available_min_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
9818 available_min_durations.add(gCamCapability[cameraId]->picture_min_duration[i]);
9819 }
9820 break;
9821 }
9822 }
9823 staticInfo.update(ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS,
9824 available_min_durations.array(), available_min_durations.size());
9825
9826 Vector<int32_t> available_hfr_configs;
9827 for (size_t i = 0; i < gCamCapability[cameraId]->hfr_tbl_cnt; i++) {
9828 int32_t fps = 0;
9829 switch (gCamCapability[cameraId]->hfr_tbl[i].mode) {
9830 case CAM_HFR_MODE_60FPS:
9831 fps = 60;
9832 break;
9833 case CAM_HFR_MODE_90FPS:
9834 fps = 90;
9835 break;
9836 case CAM_HFR_MODE_120FPS:
9837 fps = 120;
9838 break;
9839 case CAM_HFR_MODE_150FPS:
9840 fps = 150;
9841 break;
9842 case CAM_HFR_MODE_180FPS:
9843 fps = 180;
9844 break;
9845 case CAM_HFR_MODE_210FPS:
9846 fps = 210;
9847 break;
9848 case CAM_HFR_MODE_240FPS:
9849 fps = 240;
9850 break;
9851 case CAM_HFR_MODE_480FPS:
9852 fps = 480;
9853 break;
9854 case CAM_HFR_MODE_OFF:
9855 case CAM_HFR_MODE_MAX:
9856 default:
9857 break;
9858 }
9859
9860 /* Advertise only MIN_FPS_FOR_BATCH_MODE or above as HIGH_SPEED_CONFIGS */
9861 if (fps >= MIN_FPS_FOR_BATCH_MODE) {
9862 /* For each HFR frame rate, need to advertise one variable fps range
9863 * and one fixed fps range per dimension. Eg: for 120 FPS, advertise [30, 120]
9864 * and [120, 120]. While camcorder preview alone is running [30, 120] is
9865 * set by the app. When video recording is started, [120, 120] is
9866 * set. This way sensor configuration does not change when recording
9867 * is started */
9868
9869 /* (width, height, fps_min, fps_max, batch_size_max) */
9870 for (size_t j = 0; j < gCamCapability[cameraId]->hfr_tbl[i].dim_cnt &&
9871 j < MAX_SIZES_CNT; j++) {
9872 available_hfr_configs.add(
9873 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9874 available_hfr_configs.add(
9875 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9876 available_hfr_configs.add(PREVIEW_FPS_FOR_HFR);
9877 available_hfr_configs.add(fps);
9878 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9879
9880 /* (width, height, fps_min, fps_max, batch_size_max) */
9881 available_hfr_configs.add(
9882 gCamCapability[cameraId]->hfr_tbl[i].dim[j].width);
9883 available_hfr_configs.add(
9884 gCamCapability[cameraId]->hfr_tbl[i].dim[j].height);
9885 available_hfr_configs.add(fps);
9886 available_hfr_configs.add(fps);
9887 available_hfr_configs.add(fps / PREVIEW_FPS_FOR_HFR);
9888 }
9889 }
9890 }
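    // Worked example (hypothetical dimension; assumes PREVIEW_FPS_FOR_HFR == 30):
    // a 1920x1080 entry in the 120 fps HFR table is advertised as two 5-tuples,
    // (1920, 1080, 30, 120, 4) for preview-only operation and
    // (1920, 1080, 120, 120, 4) once recording starts, where 4 = 120 / 30 is the
    // maximum batch size.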
9891 //Advertise HFR capability only if the property is set
9892 memset(prop, 0, sizeof(prop));
9893 property_get("persist.camera.hal3hfr.enable", prop, "1");
9894 uint8_t hfrEnable = (uint8_t)atoi(prop);
9895
9896 if(hfrEnable && available_hfr_configs.array()) {
9897 staticInfo.update(
9898 ANDROID_CONTROL_AVAILABLE_HIGH_SPEED_VIDEO_CONFIGURATIONS,
9899 available_hfr_configs.array(), available_hfr_configs.size());
9900 }
9901
9902 int32_t max_jpeg_size = (int32_t)calcMaxJpegSize(cameraId);
9903 staticInfo.update(ANDROID_JPEG_MAX_SIZE,
9904 &max_jpeg_size, 1);
9905
9906 uint8_t avail_effects[CAM_EFFECT_MODE_MAX];
9907 size_t size = 0;
9908 count = CAM_EFFECT_MODE_MAX;
9909 count = MIN(gCamCapability[cameraId]->supported_effects_cnt, count);
9910 for (size_t i = 0; i < count; i++) {
9911 int val = lookupFwkName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
9912 gCamCapability[cameraId]->supported_effects[i]);
9913 if (NAME_NOT_FOUND != val) {
9914 avail_effects[size] = (uint8_t)val;
9915 size++;
9916 }
9917 }
9918 staticInfo.update(ANDROID_CONTROL_AVAILABLE_EFFECTS,
9919 avail_effects,
9920 size);
9921
9922 uint8_t avail_scene_modes[CAM_SCENE_MODE_MAX];
9923 uint8_t supported_indexes[CAM_SCENE_MODE_MAX];
9924 size_t supported_scene_modes_cnt = 0;
9925 count = CAM_SCENE_MODE_MAX;
9926 count = MIN(gCamCapability[cameraId]->supported_scene_modes_cnt, count);
9927 for (size_t i = 0; i < count; i++) {
9928 if (gCamCapability[cameraId]->supported_scene_modes[i] !=
9929 CAM_SCENE_MODE_OFF) {
9930 int val = lookupFwkName(SCENE_MODES_MAP,
9931 METADATA_MAP_SIZE(SCENE_MODES_MAP),
9932 gCamCapability[cameraId]->supported_scene_modes[i]);
Mansoor Aftab58465fa2017-01-26 15:02:44 -08009933
Thierry Strudel3d639192016-09-09 11:52:26 -07009934 if (NAME_NOT_FOUND != val) {
9935 avail_scene_modes[supported_scene_modes_cnt] = (uint8_t)val;
9936 supported_indexes[supported_scene_modes_cnt] = (uint8_t)i;
9937 supported_scene_modes_cnt++;
9938 }
9939 }
9940 }
9941 staticInfo.update(ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
9942 avail_scene_modes,
9943 supported_scene_modes_cnt);
9944
9945 uint8_t scene_mode_overrides[CAM_SCENE_MODE_MAX * 3];
9946 makeOverridesList(gCamCapability[cameraId]->scene_mode_overrides,
9947 supported_scene_modes_cnt,
9948 CAM_SCENE_MODE_MAX,
9949 scene_mode_overrides,
9950 supported_indexes,
9951 cameraId);
9952
9953 if (supported_scene_modes_cnt == 0) {
9954 supported_scene_modes_cnt = 1;
9955 avail_scene_modes[0] = ANDROID_CONTROL_SCENE_MODE_DISABLED;
9956 }
9957
9958 staticInfo.update(ANDROID_CONTROL_SCENE_MODE_OVERRIDES,
9959 scene_mode_overrides, supported_scene_modes_cnt * 3);
9960
9961 uint8_t available_control_modes[] = {ANDROID_CONTROL_MODE_OFF,
9962 ANDROID_CONTROL_MODE_AUTO,
9963 ANDROID_CONTROL_MODE_USE_SCENE_MODE};
9964 staticInfo.update(ANDROID_CONTROL_AVAILABLE_MODES,
9965 available_control_modes,
9966 3);
9967
9968 uint8_t avail_antibanding_modes[CAM_ANTIBANDING_MODE_MAX];
9969 size = 0;
9970 count = CAM_ANTIBANDING_MODE_MAX;
9971 count = MIN(gCamCapability[cameraId]->supported_antibandings_cnt, count);
9972 for (size_t i = 0; i < count; i++) {
9973 int val = lookupFwkName(ANTIBANDING_MODES_MAP, METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP),
9974 gCamCapability[cameraId]->supported_antibandings[i]);
9975 if (NAME_NOT_FOUND != val) {
9976 avail_antibanding_modes[size] = (uint8_t)val;
9977 size++;
9978 }
9979
9980 }
9981 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
9982 avail_antibanding_modes,
9983 size);
9984
9985 uint8_t avail_abberation_modes[] = {
9986 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF,
9987 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST,
9988 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY};
9989 count = CAM_COLOR_CORRECTION_ABERRATION_MAX;
9990 count = MIN(gCamCapability[cameraId]->aberration_modes_count, count);
9991 if (0 == count) {
9992        // If no aberration correction modes are available for a device, advertise only the OFF mode
9993 size = 1;
9994 } else {
9995        // If count is not zero then at least one of FAST or HIGH_QUALITY is supported,
9996        // so advertise all 3 modes if at least one mode is supported, as per the
9997        // new M requirement
9998 size = 3;
9999 }
10000 staticInfo.update(ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10001 avail_abberation_modes,
10002 size);
10003
10004 uint8_t avail_af_modes[CAM_FOCUS_MODE_MAX];
10005 size = 0;
10006 count = CAM_FOCUS_MODE_MAX;
10007 count = MIN(gCamCapability[cameraId]->supported_focus_modes_cnt, count);
10008 for (size_t i = 0; i < count; i++) {
10009 int val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10010 gCamCapability[cameraId]->supported_focus_modes[i]);
10011 if (NAME_NOT_FOUND != val) {
10012 avail_af_modes[size] = (uint8_t)val;
10013 size++;
10014 }
10015 }
10016 staticInfo.update(ANDROID_CONTROL_AF_AVAILABLE_MODES,
10017 avail_af_modes,
10018 size);
10019
10020 uint8_t avail_awb_modes[CAM_WB_MODE_MAX];
10021 size = 0;
10022 count = CAM_WB_MODE_MAX;
10023 count = MIN(gCamCapability[cameraId]->supported_white_balances_cnt, count);
10024 for (size_t i = 0; i < count; i++) {
10025 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10026 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10027 gCamCapability[cameraId]->supported_white_balances[i]);
10028 if (NAME_NOT_FOUND != val) {
10029 avail_awb_modes[size] = (uint8_t)val;
10030 size++;
10031 }
10032 }
10033 staticInfo.update(ANDROID_CONTROL_AWB_AVAILABLE_MODES,
10034 avail_awb_modes,
10035 size);
10036
10037 uint8_t available_flash_levels[CAM_FLASH_FIRING_LEVEL_MAX];
10038 count = CAM_FLASH_FIRING_LEVEL_MAX;
10039 count = MIN(gCamCapability[cameraId]->supported_flash_firing_level_cnt,
10040 count);
10041 for (size_t i = 0; i < count; i++) {
10042 available_flash_levels[i] =
10043 gCamCapability[cameraId]->supported_firing_levels[i];
10044 }
10045 staticInfo.update(ANDROID_FLASH_FIRING_POWER,
10046 available_flash_levels, count);
10047
10048 uint8_t flashAvailable;
10049 if (gCamCapability[cameraId]->flash_available)
10050 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_TRUE;
10051 else
10052 flashAvailable = ANDROID_FLASH_INFO_AVAILABLE_FALSE;
10053 staticInfo.update(ANDROID_FLASH_INFO_AVAILABLE,
10054 &flashAvailable, 1);
10055
10056 Vector<uint8_t> avail_ae_modes;
10057 count = CAM_AE_MODE_MAX;
10058 count = MIN(gCamCapability[cameraId]->supported_ae_modes_cnt, count);
10059 for (size_t i = 0; i < count; i++) {
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080010060 uint8_t aeMode = gCamCapability[cameraId]->supported_ae_modes[i];
10061 if (aeMode == CAM_AE_MODE_ON_EXTERNAL_FLASH) {
10062 aeMode = NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH;
10063 }
10064 avail_ae_modes.add(aeMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070010065 }
10066 if (flashAvailable) {
10067 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH);
10068 avail_ae_modes.add(ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH);
10069 }
10070 staticInfo.update(ANDROID_CONTROL_AE_AVAILABLE_MODES,
10071 avail_ae_modes.array(),
10072 avail_ae_modes.size());
10073
10074 int32_t sensitivity_range[2];
10075 sensitivity_range[0] = gCamCapability[cameraId]->sensitivity_range.min_sensitivity;
10076 sensitivity_range[1] = gCamCapability[cameraId]->sensitivity_range.max_sensitivity;
10077 staticInfo.update(ANDROID_SENSOR_INFO_SENSITIVITY_RANGE,
10078 sensitivity_range,
10079 sizeof(sensitivity_range) / sizeof(int32_t));
10080
10081 staticInfo.update(ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10082 &gCamCapability[cameraId]->max_analog_sensitivity,
10083 1);
10084
10085 int32_t sensor_orientation = (int32_t)gCamCapability[cameraId]->sensor_mount_angle;
10086 staticInfo.update(ANDROID_SENSOR_ORIENTATION,
10087 &sensor_orientation,
10088 1);
10089
10090 int32_t max_output_streams[] = {
10091 MAX_STALLING_STREAMS,
10092 MAX_PROCESSED_STREAMS,
10093 MAX_RAW_STREAMS};
10094 staticInfo.update(ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS,
10095 max_output_streams,
10096 sizeof(max_output_streams)/sizeof(max_output_streams[0]));
10097
10098 uint8_t avail_leds = 0;
10099 staticInfo.update(ANDROID_LED_AVAILABLE_LEDS,
10100 &avail_leds, 0);
10101
10102 uint8_t focus_dist_calibrated;
10103 int val = lookupFwkName(FOCUS_CALIBRATION_MAP, METADATA_MAP_SIZE(FOCUS_CALIBRATION_MAP),
10104 gCamCapability[cameraId]->focus_dist_calibrated);
10105 if (NAME_NOT_FOUND != val) {
10106 focus_dist_calibrated = (uint8_t)val;
10107 staticInfo.update(ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10108 &focus_dist_calibrated, 1);
10109 }
10110
10111 int32_t avail_testpattern_modes[MAX_TEST_PATTERN_CNT];
10112 size = 0;
10113 count = MIN(gCamCapability[cameraId]->supported_test_pattern_modes_cnt,
10114 MAX_TEST_PATTERN_CNT);
10115 for (size_t i = 0; i < count; i++) {
10116 int testpatternMode = lookupFwkName(TEST_PATTERN_MAP, METADATA_MAP_SIZE(TEST_PATTERN_MAP),
10117 gCamCapability[cameraId]->supported_test_pattern_modes[i]);
10118 if (NAME_NOT_FOUND != testpatternMode) {
10119 avail_testpattern_modes[size] = testpatternMode;
10120 size++;
10121 }
10122 }
10123 staticInfo.update(ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10124 avail_testpattern_modes,
10125 size);
10126
10127 uint8_t max_pipeline_depth = (uint8_t)(MAX_INFLIGHT_REQUESTS + EMPTY_PIPELINE_DELAY + FRAME_SKIP_DELAY);
10128 staticInfo.update(ANDROID_REQUEST_PIPELINE_MAX_DEPTH,
10129 &max_pipeline_depth,
10130 1);
10131
10132 int32_t partial_result_count = PARTIAL_RESULT_COUNT;
10133 staticInfo.update(ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10134 &partial_result_count,
10135 1);
10136
10137 int32_t max_stall_duration = MAX_REPROCESS_STALL;
10138 staticInfo.update(ANDROID_REPROCESS_MAX_CAPTURE_STALL, &max_stall_duration, 1);
10139
10140 Vector<uint8_t> available_capabilities;
10141 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BACKWARD_COMPATIBLE);
10142 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_SENSOR);
10143 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_MANUAL_POST_PROCESSING);
10144 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_READ_SENSOR_SETTINGS);
10145 if (supportBurst) {
10146 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_BURST_CAPTURE);
10147 }
10148 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_PRIVATE_REPROCESSING);
10149 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_YUV_REPROCESSING);
10150 if (hfrEnable && available_hfr_configs.array()) {
10151 available_capabilities.add(
10152 ANDROID_REQUEST_AVAILABLE_CAPABILITIES_CONSTRAINED_HIGH_SPEED_VIDEO);
10153 }
10154
10155 if (CAM_SENSOR_YUV != gCamCapability[cameraId]->sensor_type.sens_type) {
10156 available_capabilities.add(ANDROID_REQUEST_AVAILABLE_CAPABILITIES_RAW);
10157 }
10158 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10159 available_capabilities.array(),
10160 available_capabilities.size());
10161
10162    //aeLockAvailable to be set to true if capabilities include MANUAL_SENSOR or BURST_CAPTURE
10163 //Assumption is that all bayer cameras support MANUAL_SENSOR.
10164 uint8_t aeLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10165 ANDROID_CONTROL_AE_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AE_LOCK_AVAILABLE_FALSE;
10166
10167 staticInfo.update(ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10168 &aeLockAvailable, 1);
10169
10170    //awbLockAvailable to be set to true if capabilities include MANUAL_POST_PROCESSING or
10171 //BURST_CAPTURE. Assumption is that all bayer cameras support MANUAL_POST_PROCESSING.
10172 uint8_t awbLockAvailable = (gCamCapability[cameraId]->sensor_type.sens_type == CAM_SENSOR_RAW) ?
10173 ANDROID_CONTROL_AWB_LOCK_AVAILABLE_TRUE : ANDROID_CONTROL_AWB_LOCK_AVAILABLE_FALSE;
10174
10175 staticInfo.update(ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10176 &awbLockAvailable, 1);
10177
10178 int32_t max_input_streams = 1;
10179 staticInfo.update(ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10180 &max_input_streams,
10181 1);
10182
10183 /* format of the map is : input format, num_output_formats, outputFormat1,..,outputFormatN */
10184 int32_t io_format_map[] = {HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, 2,
10185 HAL_PIXEL_FORMAT_BLOB, HAL_PIXEL_FORMAT_YCbCr_420_888,
10186 HAL_PIXEL_FORMAT_YCbCr_420_888, 2, HAL_PIXEL_FORMAT_BLOB,
10187 HAL_PIXEL_FORMAT_YCbCr_420_888};
10188 staticInfo.update(ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10189 io_format_map, sizeof(io_format_map)/sizeof(io_format_map[0]));
10190
10191 int32_t max_latency = ANDROID_SYNC_MAX_LATENCY_PER_FRAME_CONTROL;
10192 staticInfo.update(ANDROID_SYNC_MAX_LATENCY,
10193 &max_latency,
10194 1);
10195
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010196#ifndef USE_HAL_3_3
10197 int32_t isp_sensitivity_range[2];
10198 isp_sensitivity_range[0] =
10199 gCamCapability[cameraId]->isp_sensitivity_range.min_sensitivity;
10200 isp_sensitivity_range[1] =
10201 gCamCapability[cameraId]->isp_sensitivity_range.max_sensitivity;
10202 staticInfo.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10203 isp_sensitivity_range,
10204 sizeof(isp_sensitivity_range) / sizeof(isp_sensitivity_range[0]));
10205#endif
10206
Thierry Strudel3d639192016-09-09 11:52:26 -070010207 uint8_t available_hot_pixel_modes[] = {ANDROID_HOT_PIXEL_MODE_FAST,
10208 ANDROID_HOT_PIXEL_MODE_HIGH_QUALITY};
10209 staticInfo.update(ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10210 available_hot_pixel_modes,
10211 sizeof(available_hot_pixel_modes)/sizeof(available_hot_pixel_modes[0]));
10212
10213 uint8_t available_shading_modes[] = {ANDROID_SHADING_MODE_OFF,
10214 ANDROID_SHADING_MODE_FAST,
10215 ANDROID_SHADING_MODE_HIGH_QUALITY};
10216 staticInfo.update(ANDROID_SHADING_AVAILABLE_MODES,
10217 available_shading_modes,
10218 3);
10219
10220 uint8_t available_lens_shading_map_modes[] = {ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF,
10221 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON};
10222 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10223 available_lens_shading_map_modes,
10224 2);
10225
10226 uint8_t available_edge_modes[] = {ANDROID_EDGE_MODE_OFF,
10227 ANDROID_EDGE_MODE_FAST,
10228 ANDROID_EDGE_MODE_HIGH_QUALITY,
10229 ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG};
10230 staticInfo.update(ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10231 available_edge_modes,
10232 sizeof(available_edge_modes)/sizeof(available_edge_modes[0]));
10233
10234 uint8_t available_noise_red_modes[] = {ANDROID_NOISE_REDUCTION_MODE_OFF,
10235 ANDROID_NOISE_REDUCTION_MODE_FAST,
10236 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY,
10237 ANDROID_NOISE_REDUCTION_MODE_MINIMAL,
10238 ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG};
10239 staticInfo.update(ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10240 available_noise_red_modes,
10241 sizeof(available_noise_red_modes)/sizeof(available_noise_red_modes[0]));
10242
10243 uint8_t available_tonemap_modes[] = {ANDROID_TONEMAP_MODE_CONTRAST_CURVE,
10244 ANDROID_TONEMAP_MODE_FAST,
10245 ANDROID_TONEMAP_MODE_HIGH_QUALITY};
10246 staticInfo.update(ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10247 available_tonemap_modes,
10248 sizeof(available_tonemap_modes)/sizeof(available_tonemap_modes[0]));
10249
10250 uint8_t available_hot_pixel_map_modes[] = {ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF};
10251 staticInfo.update(ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10252 available_hot_pixel_map_modes,
10253 sizeof(available_hot_pixel_map_modes)/sizeof(available_hot_pixel_map_modes[0]));
10254
10255 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10256 gCamCapability[cameraId]->reference_illuminant1);
10257 if (NAME_NOT_FOUND != val) {
10258 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10259 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT1, &fwkReferenceIlluminant, 1);
10260 }
10261
10262 val = lookupFwkName(REFERENCE_ILLUMINANT_MAP, METADATA_MAP_SIZE(REFERENCE_ILLUMINANT_MAP),
10263 gCamCapability[cameraId]->reference_illuminant2);
10264 if (NAME_NOT_FOUND != val) {
10265 uint8_t fwkReferenceIlluminant = (uint8_t)val;
10266 staticInfo.update(ANDROID_SENSOR_REFERENCE_ILLUMINANT2, &fwkReferenceIlluminant, 1);
10267 }
10268
10269 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX1, (camera_metadata_rational_t *)
10270 (void *)gCamCapability[cameraId]->forward_matrix1,
10271 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10272
10273 staticInfo.update(ANDROID_SENSOR_FORWARD_MATRIX2, (camera_metadata_rational_t *)
10274 (void *)gCamCapability[cameraId]->forward_matrix2,
10275 FORWARD_MATRIX_COLS * FORWARD_MATRIX_ROWS);
10276
10277 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM1, (camera_metadata_rational_t *)
10278 (void *)gCamCapability[cameraId]->color_transform1,
10279 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10280
10281 staticInfo.update(ANDROID_SENSOR_COLOR_TRANSFORM2, (camera_metadata_rational_t *)
10282 (void *)gCamCapability[cameraId]->color_transform2,
10283 COLOR_TRANSFORM_COLS * COLOR_TRANSFORM_ROWS);
10284
10285 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM1, (camera_metadata_rational_t *)
10286 (void *)gCamCapability[cameraId]->calibration_transform1,
10287 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10288
10289 staticInfo.update(ANDROID_SENSOR_CALIBRATION_TRANSFORM2, (camera_metadata_rational_t *)
10290 (void *)gCamCapability[cameraId]->calibration_transform2,
10291 CAL_TRANSFORM_COLS * CAL_TRANSFORM_ROWS);
10292
10293 int32_t request_keys_basic[] = {ANDROID_COLOR_CORRECTION_MODE,
10294 ANDROID_COLOR_CORRECTION_TRANSFORM, ANDROID_COLOR_CORRECTION_GAINS,
10295 ANDROID_COLOR_CORRECTION_ABERRATION_MODE,
10296 ANDROID_CONTROL_AE_ANTIBANDING_MODE, ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
10297 ANDROID_CONTROL_AE_LOCK, ANDROID_CONTROL_AE_MODE,
10298 ANDROID_CONTROL_AE_REGIONS, ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
10299 ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, ANDROID_CONTROL_AF_MODE,
10300 ANDROID_CONTROL_AF_TRIGGER, ANDROID_CONTROL_AWB_LOCK,
10301 ANDROID_CONTROL_AWB_MODE, ANDROID_CONTROL_CAPTURE_INTENT,
10302 ANDROID_CONTROL_EFFECT_MODE, ANDROID_CONTROL_MODE,
10303 ANDROID_CONTROL_SCENE_MODE, ANDROID_CONTROL_VIDEO_STABILIZATION_MODE,
10304 ANDROID_DEMOSAIC_MODE, ANDROID_EDGE_MODE,
10305 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10306 ANDROID_JPEG_GPS_COORDINATES,
10307 ANDROID_JPEG_GPS_PROCESSING_METHOD, ANDROID_JPEG_GPS_TIMESTAMP,
10308 ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY, ANDROID_JPEG_THUMBNAIL_QUALITY,
10309 ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE, ANDROID_LENS_FILTER_DENSITY,
10310 ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10311 ANDROID_LENS_OPTICAL_STABILIZATION_MODE, ANDROID_NOISE_REDUCTION_MODE,
10312 ANDROID_REQUEST_ID, ANDROID_REQUEST_TYPE,
10313 ANDROID_SCALER_CROP_REGION, ANDROID_SENSOR_EXPOSURE_TIME,
10314 ANDROID_SENSOR_FRAME_DURATION, ANDROID_HOT_PIXEL_MODE,
10315 ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE,
10316 ANDROID_SENSOR_SENSITIVITY, ANDROID_SHADING_MODE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010317#ifndef USE_HAL_3_3
10318 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10319#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070010320 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010321 ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010322 ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, ANDROID_TONEMAP_CURVE_BLUE,
10323 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010324 ANDROID_BLACK_LEVEL_LOCK, NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010325 /* DevCamDebug metadata request_keys_basic */
10326 DEVCAMDEBUG_META_ENABLE,
10327 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010328 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
Ranjith Kagathi Ananda0533b682017-03-24 17:52:46 -070010329 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
Ranjith Kagathi Anandae5df3752017-04-28 11:22:51 -070010330 TANGO_MODE_DATA_SENSOR_FULLFOV,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010331 NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER,
Emilian Peev656e4fa2017-06-02 16:47:04 +010010332 NEXUS_EXPERIMENTAL_2017_PD_DATA_ENABLE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010333 };
Thierry Strudel3d639192016-09-09 11:52:26 -070010334
10335 size_t request_keys_cnt =
10336 sizeof(request_keys_basic)/sizeof(request_keys_basic[0]);
10337 Vector<int32_t> available_request_keys;
10338 available_request_keys.appendArray(request_keys_basic, request_keys_cnt);
10339 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10340 available_request_keys.add(ANDROID_CONTROL_AF_REGIONS);
10341 }
10342
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010343 if (gExposeEnableZslKey) {
Chenjie Luo4a761802017-06-13 17:35:54 +000010344 available_request_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010345 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070010346 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE);
Chien-Yu Chenec328c82017-08-30 16:41:08 -070010347 available_request_keys.add(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010348 }
10349
Thierry Strudel3d639192016-09-09 11:52:26 -070010350 staticInfo.update(ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS,
10351 available_request_keys.array(), available_request_keys.size());
10352
10353 int32_t result_keys_basic[] = {ANDROID_COLOR_CORRECTION_TRANSFORM,
10354 ANDROID_COLOR_CORRECTION_GAINS, ANDROID_CONTROL_AE_MODE, ANDROID_CONTROL_AE_REGIONS,
10355 ANDROID_CONTROL_AE_STATE, ANDROID_CONTROL_AF_MODE,
10356 ANDROID_CONTROL_AF_STATE, ANDROID_CONTROL_AWB_MODE,
10357 ANDROID_CONTROL_AWB_STATE, ANDROID_CONTROL_MODE, ANDROID_EDGE_MODE,
10358 ANDROID_FLASH_FIRING_POWER, ANDROID_FLASH_FIRING_TIME, ANDROID_FLASH_MODE,
10359 ANDROID_FLASH_STATE, ANDROID_JPEG_GPS_COORDINATES, ANDROID_JPEG_GPS_PROCESSING_METHOD,
10360 ANDROID_JPEG_GPS_TIMESTAMP, ANDROID_JPEG_ORIENTATION, ANDROID_JPEG_QUALITY,
10361 ANDROID_JPEG_THUMBNAIL_QUALITY, ANDROID_JPEG_THUMBNAIL_SIZE, ANDROID_LENS_APERTURE,
10362 ANDROID_LENS_FILTER_DENSITY, ANDROID_LENS_FOCAL_LENGTH, ANDROID_LENS_FOCUS_DISTANCE,
10363 ANDROID_LENS_FOCUS_RANGE, ANDROID_LENS_STATE, ANDROID_LENS_OPTICAL_STABILIZATION_MODE,
10364 ANDROID_NOISE_REDUCTION_MODE, ANDROID_REQUEST_ID,
10365 ANDROID_SCALER_CROP_REGION, ANDROID_SHADING_MODE, ANDROID_SENSOR_EXPOSURE_TIME,
10366 ANDROID_SENSOR_FRAME_DURATION, ANDROID_SENSOR_SENSITIVITY,
10367 ANDROID_SENSOR_TIMESTAMP, ANDROID_SENSOR_NEUTRAL_COLOR_POINT,
10368 ANDROID_SENSOR_PROFILE_TONE_CURVE, ANDROID_BLACK_LEVEL_LOCK, ANDROID_TONEMAP_CURVE_BLUE,
10369 ANDROID_TONEMAP_CURVE_GREEN, ANDROID_TONEMAP_CURVE_RED, ANDROID_TONEMAP_MODE,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010370 ANDROID_STATISTICS_FACE_DETECT_MODE,
Thierry Strudel3d639192016-09-09 11:52:26 -070010371 ANDROID_STATISTICS_SHARPNESS_MAP, ANDROID_STATISTICS_SHARPNESS_MAP_MODE,
10372 ANDROID_STATISTICS_PREDICTED_COLOR_GAINS, ANDROID_STATISTICS_PREDICTED_COLOR_TRANSFORM,
10373 ANDROID_STATISTICS_SCENE_FLICKER, ANDROID_STATISTICS_FACE_RECTANGLES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010374 ANDROID_STATISTICS_FACE_SCORES,
10375#ifndef USE_HAL_3_3
10376 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST,
10377#endif
Shuzhen Wang2abea3d2016-03-31 11:09:27 -070010378 NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE,
Shuzhen Wange763e802016-03-31 10:24:29 -070010379 NEXUS_EXPERIMENTAL_2016_AF_SCENE_CHANGE,
Samuel Ha68ba5172016-12-15 18:41:12 -080010380 // DevCamDebug metadata result_keys_basic
10381 DEVCAMDEBUG_META_ENABLE,
10382 // DevCamDebug metadata result_keys AF
10383 DEVCAMDEBUG_AF_LENS_POSITION,
10384 DEVCAMDEBUG_AF_TOF_CONFIDENCE,
10385 DEVCAMDEBUG_AF_TOF_DISTANCE,
10386 DEVCAMDEBUG_AF_LUMA,
10387 DEVCAMDEBUG_AF_HAF_STATE,
10388 DEVCAMDEBUG_AF_MONITOR_PDAF_TARGET_POS,
10389 DEVCAMDEBUG_AF_MONITOR_PDAF_CONFIDENCE,
10390 DEVCAMDEBUG_AF_MONITOR_PDAF_REFOCUS,
10391 DEVCAMDEBUG_AF_MONITOR_TOF_TARGET_POS,
10392 DEVCAMDEBUG_AF_MONITOR_TOF_CONFIDENCE,
10393 DEVCAMDEBUG_AF_MONITOR_TOF_REFOCUS,
10394 DEVCAMDEBUG_AF_MONITOR_TYPE_SELECT,
10395 DEVCAMDEBUG_AF_MONITOR_REFOCUS,
10396 DEVCAMDEBUG_AF_MONITOR_TARGET_POS,
10397 DEVCAMDEBUG_AF_SEARCH_PDAF_TARGET_POS,
10398 DEVCAMDEBUG_AF_SEARCH_PDAF_NEXT_POS,
10399 DEVCAMDEBUG_AF_SEARCH_PDAF_NEAR_POS,
10400 DEVCAMDEBUG_AF_SEARCH_PDAF_FAR_POS,
10401 DEVCAMDEBUG_AF_SEARCH_PDAF_CONFIDENCE,
10402 DEVCAMDEBUG_AF_SEARCH_TOF_TARGET_POS,
10403 DEVCAMDEBUG_AF_SEARCH_TOF_NEXT_POS,
10404 DEVCAMDEBUG_AF_SEARCH_TOF_NEAR_POS,
10405 DEVCAMDEBUG_AF_SEARCH_TOF_FAR_POS,
10406 DEVCAMDEBUG_AF_SEARCH_TOF_CONFIDENCE,
10407 DEVCAMDEBUG_AF_SEARCH_TYPE_SELECT,
10408 DEVCAMDEBUG_AF_SEARCH_NEXT_POS,
10409 DEVCAMDEBUG_AF_SEARCH_TARGET_POS,
10410 // DevCamDebug metadata result_keys AEC
10411 DEVCAMDEBUG_AEC_TARGET_LUMA,
10412 DEVCAMDEBUG_AEC_COMP_LUMA,
10413 DEVCAMDEBUG_AEC_AVG_LUMA,
10414 DEVCAMDEBUG_AEC_CUR_LUMA,
10415 DEVCAMDEBUG_AEC_LINECOUNT,
10416 DEVCAMDEBUG_AEC_REAL_GAIN,
10417 DEVCAMDEBUG_AEC_EXP_INDEX,
10418 DEVCAMDEBUG_AEC_LUX_IDX,
Samuel Ha34229982017-02-17 13:51:11 -080010419 // DevCamDebug metadata result_keys zzHDR
10420 DEVCAMDEBUG_AEC_L_REAL_GAIN,
10421 DEVCAMDEBUG_AEC_L_LINECOUNT,
10422 DEVCAMDEBUG_AEC_S_REAL_GAIN,
10423 DEVCAMDEBUG_AEC_S_LINECOUNT,
10424 DEVCAMDEBUG_AEC_HDR_SENSITIVITY_RATIO,
10425 DEVCAMDEBUG_AEC_HDR_EXP_TIME_RATIO,
10426 // DevCamDebug metadata result_keys ADRC
10427 DEVCAMDEBUG_AEC_TOTAL_DRC_GAIN,
10428 DEVCAMDEBUG_AEC_COLOR_DRC_GAIN,
10429 DEVCAMDEBUG_AEC_GTM_RATIO,
10430 DEVCAMDEBUG_AEC_LTM_RATIO,
10431 DEVCAMDEBUG_AEC_LA_RATIO,
10432 DEVCAMDEBUG_AEC_GAMMA_RATIO,
Samuel Habdf4fac2017-07-28 17:21:18 -070010433 // DevCamDebug metadata result_keys AEC MOTION
10434 DEVCAMDEBUG_AEC_CAMERA_MOTION_DX,
10435 DEVCAMDEBUG_AEC_CAMERA_MOTION_DY,
10436 DEVCAMDEBUG_AEC_SUBJECT_MOTION,
Samuel Ha68ba5172016-12-15 18:41:12 -080010437 // DevCamDebug metadata result_keys AWB
10438 DEVCAMDEBUG_AWB_R_GAIN,
10439 DEVCAMDEBUG_AWB_G_GAIN,
10440 DEVCAMDEBUG_AWB_B_GAIN,
10441 DEVCAMDEBUG_AWB_CCT,
10442 DEVCAMDEBUG_AWB_DECISION,
10443 /* DevCamDebug metadata end */
Shuzhen Wang14415f52016-11-16 18:26:18 -080010444 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE,
10445 NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS,
10446 NEXUS_EXPERIMENTAL_2017_HISTOGRAM,
Shuzhen Wangf1b4ddc2017-04-10 18:22:11 -070010447 NEXUS_EXPERIMENTAL_2017_AF_REGIONS_CONFIDENCE,
Shuzhen Wangc89c77e2017-08-07 15:50:12 -070010448 NEXUS_EXPERIMENTAL_2017_EXP_TIME_BOOST,
Shuzhen Wang3569d4a2017-09-04 19:10:28 -070010449 NEXUS_EXPERIMENTAL_2017_SCENE_DISTANCE,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010450 };
10451
Thierry Strudel3d639192016-09-09 11:52:26 -070010452 size_t result_keys_cnt =
10453 sizeof(result_keys_basic)/sizeof(result_keys_basic[0]);
10454
10455 Vector<int32_t> available_result_keys;
10456 available_result_keys.appendArray(result_keys_basic, result_keys_cnt);
10457 if (gCamCapability[cameraId]->supported_focus_modes_cnt > 1) {
10458 available_result_keys.add(ANDROID_CONTROL_AF_REGIONS);
10459 }
10460 if (CAM_SENSOR_RAW == gCamCapability[cameraId]->sensor_type.sens_type) {
10461 available_result_keys.add(ANDROID_SENSOR_NOISE_PROFILE);
10462 available_result_keys.add(ANDROID_SENSOR_GREEN_SPLIT);
10463 }
10464 if (supportedFaceDetectMode == 1) {
10465 available_result_keys.add(ANDROID_STATISTICS_FACE_RECTANGLES);
10466 available_result_keys.add(ANDROID_STATISTICS_FACE_SCORES);
10467 } else if ((supportedFaceDetectMode == 2) ||
10468 (supportedFaceDetectMode == 3)) {
10469 available_result_keys.add(ANDROID_STATISTICS_FACE_IDS);
10470 available_result_keys.add(ANDROID_STATISTICS_FACE_LANDMARKS);
10471 }
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010472#ifndef USE_HAL_3_3
Shuzhen Wanga1bc9de2017-09-14 16:54:02 -070010473 {
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010474 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_BLACK_LEVEL);
10475 available_result_keys.add(ANDROID_SENSOR_DYNAMIC_WHITE_LEVEL);
10476 }
10477#endif
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010478
10479 if (gExposeEnableZslKey) {
10480 available_result_keys.add(ANDROID_CONTROL_ENABLE_ZSL);
Chien-Yu Chendaf68892017-08-25 12:56:40 -070010481 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070010482 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG);
10483 available_result_keys.add(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070010484 }
10485
Thierry Strudel3d639192016-09-09 11:52:26 -070010486 staticInfo.update(ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10487 available_result_keys.array(), available_result_keys.size());
10488
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010489 int32_t characteristics_keys_basic[] = {ANDROID_CONTROL_AE_AVAILABLE_ANTIBANDING_MODES,
Thierry Strudel3d639192016-09-09 11:52:26 -070010490 ANDROID_CONTROL_AE_AVAILABLE_MODES, ANDROID_CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES,
10491 ANDROID_CONTROL_AE_COMPENSATION_RANGE, ANDROID_CONTROL_AE_COMPENSATION_STEP,
10492 ANDROID_CONTROL_AF_AVAILABLE_MODES, ANDROID_CONTROL_AVAILABLE_EFFECTS,
10493 ANDROID_COLOR_CORRECTION_AVAILABLE_ABERRATION_MODES,
10494 ANDROID_SCALER_CROPPING_TYPE,
10495 ANDROID_SYNC_MAX_LATENCY,
10496 ANDROID_SENSOR_INFO_TIMESTAMP_SOURCE,
10497 ANDROID_CONTROL_AVAILABLE_SCENE_MODES,
10498 ANDROID_CONTROL_AVAILABLE_VIDEO_STABILIZATION_MODES,
10499 ANDROID_CONTROL_AWB_AVAILABLE_MODES, ANDROID_CONTROL_MAX_REGIONS,
10500 ANDROID_CONTROL_SCENE_MODE_OVERRIDES,ANDROID_FLASH_INFO_AVAILABLE,
10501 ANDROID_FLASH_INFO_CHARGE_DURATION, ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
10502 ANDROID_JPEG_MAX_SIZE, ANDROID_LENS_INFO_AVAILABLE_APERTURES,
10503 ANDROID_LENS_INFO_AVAILABLE_FILTER_DENSITIES,
10504 ANDROID_LENS_INFO_AVAILABLE_FOCAL_LENGTHS,
10505 ANDROID_LENS_INFO_AVAILABLE_OPTICAL_STABILIZATION,
10506 ANDROID_LENS_INFO_HYPERFOCAL_DISTANCE, ANDROID_LENS_INFO_MINIMUM_FOCUS_DISTANCE,
10507 ANDROID_LENS_INFO_SHADING_MAP_SIZE, ANDROID_LENS_INFO_FOCUS_DISTANCE_CALIBRATION,
10508 ANDROID_LENS_FACING,
10509 ANDROID_REQUEST_MAX_NUM_OUTPUT_STREAMS, ANDROID_REQUEST_MAX_NUM_INPUT_STREAMS,
10510 ANDROID_REQUEST_PIPELINE_MAX_DEPTH, ANDROID_REQUEST_AVAILABLE_CAPABILITIES,
10511 ANDROID_REQUEST_AVAILABLE_REQUEST_KEYS, ANDROID_REQUEST_AVAILABLE_RESULT_KEYS,
10512 ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS, ANDROID_REQUEST_PARTIAL_RESULT_COUNT,
10513 ANDROID_SCALER_AVAILABLE_MAX_DIGITAL_ZOOM,
10514 ANDROID_SCALER_AVAILABLE_INPUT_OUTPUT_FORMATS_MAP,
10515 ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS,
10516 /*ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,*/
10517 ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS, ANDROID_SENSOR_FORWARD_MATRIX1,
10518 ANDROID_SENSOR_REFERENCE_ILLUMINANT1, ANDROID_SENSOR_REFERENCE_ILLUMINANT2,
10519 ANDROID_SENSOR_FORWARD_MATRIX2, ANDROID_SENSOR_COLOR_TRANSFORM1,
10520 ANDROID_SENSOR_COLOR_TRANSFORM2, ANDROID_SENSOR_CALIBRATION_TRANSFORM1,
10521 ANDROID_SENSOR_CALIBRATION_TRANSFORM2, ANDROID_SENSOR_INFO_ACTIVE_ARRAY_SIZE,
10522 ANDROID_SENSOR_INFO_SENSITIVITY_RANGE, ANDROID_SENSOR_INFO_COLOR_FILTER_ARRANGEMENT,
10523 ANDROID_SENSOR_INFO_EXPOSURE_TIME_RANGE, ANDROID_SENSOR_INFO_MAX_FRAME_DURATION,
10524 ANDROID_SENSOR_INFO_PHYSICAL_SIZE, ANDROID_SENSOR_INFO_PIXEL_ARRAY_SIZE,
10525 ANDROID_SENSOR_INFO_WHITE_LEVEL, ANDROID_SENSOR_BASE_GAIN_FACTOR,
10526 ANDROID_SENSOR_BLACK_LEVEL_PATTERN, ANDROID_SENSOR_MAX_ANALOG_SENSITIVITY,
10527 ANDROID_SENSOR_ORIENTATION, ANDROID_SENSOR_AVAILABLE_TEST_PATTERN_MODES,
10528 ANDROID_STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES,
Thierry Strudel54dc9782017-02-15 12:12:10 -080010529 ANDROID_STATISTICS_INFO_MAX_FACE_COUNT,
Thierry Strudel3d639192016-09-09 11:52:26 -070010530 ANDROID_STATISTICS_INFO_MAX_SHARPNESS_MAP_VALUE,
10531 ANDROID_STATISTICS_INFO_SHARPNESS_MAP_SIZE, ANDROID_HOT_PIXEL_AVAILABLE_HOT_PIXEL_MODES,
10532 ANDROID_EDGE_AVAILABLE_EDGE_MODES,
10533 ANDROID_NOISE_REDUCTION_AVAILABLE_NOISE_REDUCTION_MODES,
10534 ANDROID_TONEMAP_AVAILABLE_TONE_MAP_MODES,
10535 ANDROID_STATISTICS_INFO_AVAILABLE_HOT_PIXEL_MAP_MODES,
10536 ANDROID_TONEMAP_MAX_CURVE_POINTS,
10537 ANDROID_CONTROL_AVAILABLE_MODES,
10538 ANDROID_CONTROL_AE_LOCK_AVAILABLE,
10539 ANDROID_CONTROL_AWB_LOCK_AVAILABLE,
10540 ANDROID_STATISTICS_INFO_AVAILABLE_LENS_SHADING_MAP_MODES,
10541 ANDROID_SHADING_AVAILABLE_MODES,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010542 ANDROID_INFO_SUPPORTED_HARDWARE_LEVEL,
10543#ifndef USE_HAL_3_3
10544 ANDROID_SENSOR_OPAQUE_RAW_SIZE,
10545 ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST_RANGE,
10546#endif
10547 };
10548
10549 Vector<int32_t> available_characteristics_keys;
10550 available_characteristics_keys.appendArray(characteristics_keys_basic,
10551 sizeof(characteristics_keys_basic)/sizeof(int32_t));
10552#ifndef USE_HAL_3_3
10553 if (hasBlackRegions) {
10554 available_characteristics_keys.add(ANDROID_SENSOR_OPTICAL_BLACK_REGIONS);
10555 }
10556#endif
Emilian Peev0f3c3162017-03-15 12:57:46 +000010557
10558 if (0 <= indexPD) {
10559 int32_t depthKeys[] = {
10560 ANDROID_DEPTH_MAX_DEPTH_SAMPLES,
10561 ANDROID_DEPTH_AVAILABLE_DEPTH_STREAM_CONFIGURATIONS,
10562 ANDROID_DEPTH_AVAILABLE_DEPTH_MIN_FRAME_DURATIONS,
10563 ANDROID_DEPTH_AVAILABLE_DEPTH_STALL_DURATIONS,
10564 ANDROID_DEPTH_DEPTH_IS_EXCLUSIVE
10565 };
10566 available_characteristics_keys.appendArray(depthKeys,
10567 sizeof(depthKeys) / sizeof(depthKeys[0]));
10568 }
10569
Thierry Strudel3d639192016-09-09 11:52:26 -070010570 staticInfo.update(ANDROID_REQUEST_AVAILABLE_CHARACTERISTICS_KEYS,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010571 available_characteristics_keys.array(),
10572 available_characteristics_keys.size());
Thierry Strudel3d639192016-09-09 11:52:26 -070010573
10574    /* Available stall durations depend on the HW + SW and will differ across devices */
10575    /* RAW stall durations still have to be added after implementation */
10576 int32_t stall_formats[] = {HAL_PIXEL_FORMAT_BLOB, ANDROID_SCALER_AVAILABLE_FORMATS_RAW16};
10577 size_t stall_formats_count = sizeof(stall_formats)/sizeof(int32_t);
10578
10579 Vector<int64_t> available_stall_durations;
10580 for (uint32_t j = 0; j < stall_formats_count; j++) {
10581 if (stall_formats[j] == HAL_PIXEL_FORMAT_BLOB) {
10582 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10583 gCamCapability[cameraId]->picture_sizes_tbl_cnt); i++) {
10584 available_stall_durations.add(stall_formats[j]);
10585 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].width);
10586 available_stall_durations.add(gCamCapability[cameraId]->picture_sizes_tbl[i].height);
10587 available_stall_durations.add(gCamCapability[cameraId]->jpeg_stall_durations[i]);
10588 }
10589 } else {
10590 for (uint32_t i = 0; i < MIN(MAX_SIZES_CNT,
10591 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10592 available_stall_durations.add(stall_formats[j]);
10593 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].width);
10594 available_stall_durations.add(gCamCapability[cameraId]->raw_dim[i].height);
10595 available_stall_durations.add(gCamCapability[cameraId]->raw16_stall_durations[i]);
10596 }
10597 }
10598 }
10599 staticInfo.update(ANDROID_SCALER_AVAILABLE_STALL_DURATIONS,
10600 available_stall_durations.array(),
10601 available_stall_durations.size());
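    // Each entry above is a flattened 4-tuple {format, width, height, stall duration}; e.g.
    // (illustrative values) {HAL_PIXEL_FORMAT_BLOB, 4032, 3024, 33333333} would describe a
    // ~33 ms JPEG stall for the 4032x3024 size.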
10602
10603 //QCAMERA3_OPAQUE_RAW
10604 uint8_t raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10605 cam_format_t fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10606 switch (gCamCapability[cameraId]->opaque_raw_fmt) {
10607 case LEGACY_RAW:
10608 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10609 fmt = CAM_FORMAT_BAYER_QCOM_RAW_8BPP_GBRG;
10610 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10611 fmt = CAM_FORMAT_BAYER_QCOM_RAW_10BPP_GBRG;
10612 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10613 fmt = CAM_FORMAT_BAYER_QCOM_RAW_12BPP_GBRG;
10614 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_LEGACY;
10615 break;
10616 case MIPI_RAW:
10617 if (gCamCapability[cameraId]->white_level == MAX_VALUE_8BIT)
10618 fmt = CAM_FORMAT_BAYER_MIPI_RAW_8BPP_GBRG;
10619 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_10BIT)
10620 fmt = CAM_FORMAT_BAYER_MIPI_RAW_10BPP_GBRG;
10621 else if (gCamCapability[cameraId]->white_level == MAX_VALUE_12BIT)
10622 fmt = CAM_FORMAT_BAYER_MIPI_RAW_12BPP_GBRG;
10623 raw_format = QCAMERA3_OPAQUE_RAW_FORMAT_MIPI;
10624 break;
10625 default:
10626 LOGE("unknown opaque_raw_format %d",
10627 gCamCapability[cameraId]->opaque_raw_fmt);
10628 break;
10629 }
10630 staticInfo.update(QCAMERA3_OPAQUE_RAW_FORMAT, &raw_format, 1);
10631
10632 Vector<int32_t> strides;
10633 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10634 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10635 cam_stream_buf_plane_info_t buf_planes;
10636 strides.add(gCamCapability[cameraId]->raw_dim[i].width);
10637 strides.add(gCamCapability[cameraId]->raw_dim[i].height);
10638 mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10639 &gCamCapability[cameraId]->padding_info, &buf_planes);
10640 strides.add(buf_planes.plane_info.mp[0].stride);
10641 }
10642 staticInfo.update(QCAMERA3_OPAQUE_RAW_STRIDES, strides.array(),
10643 strides.size());
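    // QCAMERA3_OPAQUE_RAW_STRIDES holds one {width, height, stride} triplet per supported RAW
    // dimension, with the stride taken from the plane info computed for the opaque RAW format
    // selected above.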
10644
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010645 //TBD: remove the following line once backend advertises zzHDR in feature mask
10646 gCamCapability[cameraId]->qcom_supported_feature_mask |= CAM_QCOM_FEATURE_ZIGZAG_HDR;
Thierry Strudel04e026f2016-10-10 11:27:36 -070010647 //Video HDR default
10648 if ((gCamCapability[cameraId]->qcom_supported_feature_mask) &
10649 (CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR |
Mansoor Aftab58465fa2017-01-26 15:02:44 -080010650 CAM_QCOM_FEATURE_ZIGZAG_HDR | CAM_QCOM_FEATURE_SENSOR_HDR)) {
Thierry Strudel04e026f2016-10-10 11:27:36 -070010651 int32_t vhdr_mode[] = {
10652 QCAMERA3_VIDEO_HDR_MODE_OFF,
10653 QCAMERA3_VIDEO_HDR_MODE_ON};
10654
10655 size_t vhdr_mode_count = sizeof(vhdr_mode) / sizeof(int32_t);
10656 staticInfo.update(QCAMERA3_AVAILABLE_VIDEO_HDR_MODES,
10657 vhdr_mode, vhdr_mode_count);
10658 }
10659
Thierry Strudel3d639192016-09-09 11:52:26 -070010660 staticInfo.update(QCAMERA3_DUALCAM_CALIB_META_DATA_BLOB,
10661 (const uint8_t*)&gCamCapability[cameraId]->related_cam_calibration,
10662 sizeof(gCamCapability[cameraId]->related_cam_calibration));
10663
10664 uint8_t isMonoOnly =
10665 (gCamCapability[cameraId]->color_arrangement == CAM_FILTER_ARRANGEMENT_Y);
10666 staticInfo.update(QCAMERA3_SENSOR_IS_MONO_ONLY,
10667 &isMonoOnly, 1);
10668
Thierry Strudel9e74aae2016-09-22 17:10:18 -070010669#ifndef USE_HAL_3_3
10670 Vector<int32_t> opaque_size;
10671 for (size_t j = 0; j < scalar_formats_count; j++) {
10672 if (scalar_formats[j] == ANDROID_SCALER_AVAILABLE_FORMATS_RAW_OPAQUE) {
10673 for (size_t i = 0; i < MIN(MAX_SIZES_CNT,
10674 gCamCapability[cameraId]->supported_raw_dim_cnt); i++) {
10675 cam_stream_buf_plane_info_t buf_planes;
10676
10677 rc = mm_stream_calc_offset_raw(fmt, &gCamCapability[cameraId]->raw_dim[i],
10678 &gCamCapability[cameraId]->padding_info, &buf_planes);
10679
10680 if (rc == 0) {
10681 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].width);
10682 opaque_size.add(gCamCapability[cameraId]->raw_dim[i].height);
10683 opaque_size.add(buf_planes.plane_info.frame_len);
10684            } else {
10685 LOGE("raw frame calculation failed!");
10686 }
10687 }
10688 }
10689 }
10690
10691 if ((opaque_size.size() > 0) &&
10692 (opaque_size.size() % PER_CONFIGURATION_SIZE_3 == 0))
10693 staticInfo.update(ANDROID_SENSOR_OPAQUE_RAW_SIZE, opaque_size.array(), opaque_size.size());
10694 else
10695        LOGW("Warning: ANDROID_SENSOR_OPAQUE_RAW_SIZE is using rough estimation (2 bytes/pixel)");
10696#endif
10697
Thierry Strudel04e026f2016-10-10 11:27:36 -070010698 if (gCamCapability[cameraId]->supported_ir_mode_cnt > 0) {
10699 int32_t avail_ir_modes[CAM_IR_MODE_MAX];
10700 size = 0;
10701 count = CAM_IR_MODE_MAX;
10702 count = MIN(gCamCapability[cameraId]->supported_ir_mode_cnt, count);
10703 for (size_t i = 0; i < count; i++) {
10704 int val = lookupFwkName(IR_MODES_MAP, METADATA_MAP_SIZE(IR_MODES_MAP),
10705 gCamCapability[cameraId]->supported_ir_modes[i]);
10706 if (NAME_NOT_FOUND != val) {
10707 avail_ir_modes[size] = (int32_t)val;
10708 size++;
10709 }
10710 }
10711 staticInfo.update(QCAMERA3_IR_AVAILABLE_MODES,
10712 avail_ir_modes, size);
10713 }
10714
Thierry Strudel295a0ca2016-11-03 18:38:47 -070010715 if (gCamCapability[cameraId]->supported_instant_aec_modes_cnt > 0) {
10716 int32_t available_instant_aec_modes[CAM_AEC_CONVERGENCE_MAX];
10717 size = 0;
10718 count = CAM_AEC_CONVERGENCE_MAX;
10719 count = MIN(gCamCapability[cameraId]->supported_instant_aec_modes_cnt, count);
10720 for (size_t i = 0; i < count; i++) {
10721 int val = lookupFwkName(INSTANT_AEC_MODES_MAP, METADATA_MAP_SIZE(INSTANT_AEC_MODES_MAP),
10722 gCamCapability[cameraId]->supported_instant_aec_modes[i]);
10723 if (NAME_NOT_FOUND != val) {
10724 available_instant_aec_modes[size] = (int32_t)val;
10725 size++;
10726 }
10727 }
10728 staticInfo.update(QCAMERA3_INSTANT_AEC_AVAILABLE_MODES,
10729 available_instant_aec_modes, size);
10730 }
10731
Thierry Strudel54dc9782017-02-15 12:12:10 -080010732 int32_t sharpness_range[] = {
10733 gCamCapability[cameraId]->sharpness_ctrl.min_value,
10734 gCamCapability[cameraId]->sharpness_ctrl.max_value};
10735 staticInfo.update(QCAMERA3_SHARPNESS_RANGE, sharpness_range, 2);
10736
10737 if (gCamCapability[cameraId]->supported_binning_correction_mode_cnt > 0) {
10738 int32_t avail_binning_modes[CAM_BINNING_CORRECTION_MODE_MAX];
10739 size = 0;
10740 count = CAM_BINNING_CORRECTION_MODE_MAX;
10741 count = MIN(gCamCapability[cameraId]->supported_binning_correction_mode_cnt, count);
10742 for (size_t i = 0; i < count; i++) {
10743 int val = lookupFwkName(BINNING_CORRECTION_MODES_MAP,
10744 METADATA_MAP_SIZE(BINNING_CORRECTION_MODES_MAP),
10745 gCamCapability[cameraId]->supported_binning_modes[i]);
10746 if (NAME_NOT_FOUND != val) {
10747 avail_binning_modes[size] = (int32_t)val;
10748 size++;
10749 }
10750 }
10751 staticInfo.update(QCAMERA3_AVAILABLE_BINNING_CORRECTION_MODES,
10752 avail_binning_modes, size);
10753 }
10754
10755 if (gCamCapability[cameraId]->supported_aec_modes_cnt > 0) {
10756 int32_t available_aec_modes[CAM_AEC_MODE_MAX];
10757 size = 0;
10758 count = MIN(gCamCapability[cameraId]->supported_aec_modes_cnt, CAM_AEC_MODE_MAX);
10759 for (size_t i = 0; i < count; i++) {
10760 int32_t val = lookupFwkName(AEC_MODES_MAP, METADATA_MAP_SIZE(AEC_MODES_MAP),
10761 gCamCapability[cameraId]->supported_aec_modes[i]);
10762 if (NAME_NOT_FOUND != val)
10763 available_aec_modes[size++] = val;
10764 }
10765 staticInfo.update(QCAMERA3_EXPOSURE_METER_AVAILABLE_MODES,
10766 available_aec_modes, size);
10767 }
10768
10769 if (gCamCapability[cameraId]->supported_iso_modes_cnt > 0) {
10770 int32_t available_iso_modes[CAM_ISO_MODE_MAX];
10771 size = 0;
10772 count = MIN(gCamCapability[cameraId]->supported_iso_modes_cnt, CAM_ISO_MODE_MAX);
10773 for (size_t i = 0; i < count; i++) {
10774 int32_t val = lookupFwkName(ISO_MODES_MAP, METADATA_MAP_SIZE(ISO_MODES_MAP),
10775 gCamCapability[cameraId]->supported_iso_modes[i]);
10776 if (NAME_NOT_FOUND != val)
10777 available_iso_modes[size++] = val;
10778 }
10779 staticInfo.update(QCAMERA3_ISO_AVAILABLE_MODES,
10780 available_iso_modes, size);
10781 }
10782
10783 int64_t available_exp_time_range[EXPOSURE_TIME_RANGE_CNT];
Jason Lee805955a2017-05-04 10:29:14 -070010784 for (size_t i = 0; i < EXPOSURE_TIME_RANGE_CNT; i++)
Thierry Strudel54dc9782017-02-15 12:12:10 -080010785 available_exp_time_range[i] = gCamCapability[cameraId]->exposure_time_range[i];
10786 staticInfo.update(QCAMERA3_EXP_TIME_RANGE,
10787 available_exp_time_range, EXPOSURE_TIME_RANGE_CNT);
10788
10789 int32_t available_saturation_range[4];
10790 available_saturation_range[0] = gCamCapability[cameraId]->saturation_ctrl.min_value;
10791 available_saturation_range[1] = gCamCapability[cameraId]->saturation_ctrl.max_value;
10792 available_saturation_range[2] = gCamCapability[cameraId]->saturation_ctrl.def_value;
10793 available_saturation_range[3] = gCamCapability[cameraId]->saturation_ctrl.step;
10794 staticInfo.update(QCAMERA3_SATURATION_RANGE,
10795 available_saturation_range, 4);
10796
10797 uint8_t is_hdr_values[2];
10798 is_hdr_values[0] = 0;
10799 is_hdr_values[1] = 1;
10800 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_VALUES,
10801 is_hdr_values, 2);
10802
10803 float is_hdr_confidence_range[2];
10804 is_hdr_confidence_range[0] = 0.0;
10805 is_hdr_confidence_range[1] = 1.0;
10806 staticInfo.update(QCAMERA3_STATS_IS_HDR_SCENE_CONFIDENCE_RANGE,
10807 is_hdr_confidence_range, 2);
10808
Emilian Peev0a972ef2017-03-16 10:25:53 +000010809 size_t eepromLength = strnlen(
10810 reinterpret_cast<const char *>(
10811 gCamCapability[cameraId]->eeprom_version_info),
10812 sizeof(gCamCapability[cameraId]->eeprom_version_info));
10813 if (0 < eepromLength) {
Zhijun Hea557c4c2017-03-16 18:37:53 -070010814 char easelInfo[] = ",E:N";
10815 char *eepromInfo = reinterpret_cast<char *>(gCamCapability[cameraId]->eeprom_version_info);
10816 if (eepromLength + sizeof(easelInfo) < MAX_EEPROM_VERSION_INFO_LEN) {
10817 eepromLength += sizeof(easelInfo);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070010818 strlcat(eepromInfo, ((gEaselManagerClient != nullptr &&
Arnd Geis082a4d72017-08-24 10:33:07 -070010819 gEaselManagerClient->isEaselPresentOnDevice()) ? ",E-ver" : ",E:N"),
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070010820 MAX_EEPROM_VERSION_INFO_LEN);
Zhijun Hea557c4c2017-03-16 18:37:53 -070010821 }
Emilian Peev0a972ef2017-03-16 10:25:53 +000010822 staticInfo.update(NEXUS_EXPERIMENTAL_2017_EEPROM_VERSION_INFO,
10823 gCamCapability[cameraId]->eeprom_version_info, eepromLength);
10824 }
10825
Thierry Strudel3d639192016-09-09 11:52:26 -070010826 gStaticMetadata[cameraId] = staticInfo.release();
10827 return rc;
10828}
10829
10830/*===========================================================================
10831 * FUNCTION : makeTable
10832 *
10833 * DESCRIPTION: make a table of sizes
10834 *
10835 * PARAMETERS :
10836 *   @dimTable : input dimension table; @size : number of valid entries in it
10837 *   @max_size : max entries to copy; @sizeTable : output array of flattened {width, height} pairs
10838 *==========================================================================*/
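// Example (illustrative sizes): dimTable {4032x3024, 1920x1080} flattens to
// sizeTable {4032, 3024, 1920, 1080}.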
10839void QCamera3HardwareInterface::makeTable(cam_dimension_t* dimTable, size_t size,
10840 size_t max_size, int32_t *sizeTable)
10841{
10842 size_t j = 0;
10843 if (size > max_size) {
10844 size = max_size;
10845 }
10846 for (size_t i = 0; i < size; i++) {
10847 sizeTable[j] = dimTable[i].width;
10848 sizeTable[j+1] = dimTable[i].height;
10849 j+=2;
10850 }
10851}
10852
10853/*===========================================================================
10854 * FUNCTION : makeFPSTable
10855 *
10856 * DESCRIPTION: make a table of fps ranges
10857 *
10858 * PARAMETERS : @fpsTable : input fps ranges; @size : valid entries; @max_size : copy limit
10859 *              @fpsRangesTable : output array of flattened {min_fps, max_fps} pairs
10860 *==========================================================================*/
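// Example (illustrative ranges): fpsTable {[15, 30], [30, 30]} flattens to
// fpsRangesTable {15, 30, 30, 30}.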
10861void QCamera3HardwareInterface::makeFPSTable(cam_fps_range_t* fpsTable, size_t size,
10862 size_t max_size, int32_t *fpsRangesTable)
10863{
10864 size_t j = 0;
10865 if (size > max_size) {
10866 size = max_size;
10867 }
10868 for (size_t i = 0; i < size; i++) {
10869 fpsRangesTable[j] = (int32_t)fpsTable[i].min_fps;
10870 fpsRangesTable[j+1] = (int32_t)fpsTable[i].max_fps;
10871 j+=2;
10872 }
10873}
10874
10875/*===========================================================================
10876 * FUNCTION : makeOverridesList
10877 *
10878 * DESCRIPTION: make a list of scene mode overrides
10879 *
10880 * PARAMETERS : @overridesTable : per-scene-mode overrides from the backend; @size : valid entries
10881 *   @max_size : copy limit; @overridesList : output array of {ae, awb, af} override triplets
10882 *   @supported_indexes : indexes of the scene modes exposed to the framework; @camera_id : camera Id
10883 *==========================================================================*/
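// Each supported scene mode contributes a {ae_mode, awb_mode, af_mode} triplet; e.g.
// (illustrative) a flash-capable camera could emit {ON_AUTO_FLASH, AUTO, CONTINUOUS_PICTURE}
// for a scene mode whose AF override is supported by the sensor.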
10884void QCamera3HardwareInterface::makeOverridesList(
10885 cam_scene_mode_overrides_t* overridesTable, size_t size, size_t max_size,
10886 uint8_t *overridesList, uint8_t *supported_indexes, uint32_t camera_id)
10887{
10888 /*daemon will give a list of overrides for all scene modes.
10889 However we should send the fwk only the overrides for the scene modes
10890 supported by the framework*/
10891 size_t j = 0;
10892 if (size > max_size) {
10893 size = max_size;
10894 }
10895 size_t focus_count = CAM_FOCUS_MODE_MAX;
10896 focus_count = MIN(gCamCapability[camera_id]->supported_focus_modes_cnt,
10897 focus_count);
10898 for (size_t i = 0; i < size; i++) {
10899 bool supt = false;
10900 size_t index = supported_indexes[i];
10901 overridesList[j] = gCamCapability[camera_id]->flash_available ?
10902 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH : ANDROID_CONTROL_AE_MODE_ON;
10903 int val = lookupFwkName(WHITE_BALANCE_MODES_MAP,
10904 METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
10905 overridesTable[index].awb_mode);
10906 if (NAME_NOT_FOUND != val) {
10907 overridesList[j+1] = (uint8_t)val;
10908 }
10909 uint8_t focus_override = overridesTable[index].af_mode;
10910 for (size_t k = 0; k < focus_count; k++) {
10911 if (gCamCapability[camera_id]->supported_focus_modes[k] == focus_override) {
10912 supt = true;
10913 break;
10914 }
10915 }
10916 if (supt) {
10917 val = lookupFwkName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
10918 focus_override);
10919 if (NAME_NOT_FOUND != val) {
10920 overridesList[j+2] = (uint8_t)val;
10921 }
10922 } else {
10923 overridesList[j+2] = ANDROID_CONTROL_AF_MODE_OFF;
10924 }
10925 j+=3;
10926 }
10927}
10928
10929/*===========================================================================
10930 * FUNCTION : filterJpegSizes
10931 *
10932 * DESCRIPTION: Returns the supported JPEG sizes, filtered to the dimensions that the
10933 * active array size can be downscaled to
10934 *
10935 * PARAMETERS : @jpegSizes : output {width, height} pairs; @processedSizes/@processedSizesCnt : input sizes
10936 *   @maxCount : output capacity; @active_array_size, @downscale_factor : define the minimum accepted size
10937 * RETURN : length of jpegSizes array
10938 *==========================================================================*/
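// Example (illustrative): with a 4032x3024 active array and a downscale_factor of 4, only
// processed sizes of at least 1008x756 are kept as JPEG sizes.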
10939
10940size_t QCamera3HardwareInterface::filterJpegSizes(int32_t *jpegSizes, int32_t *processedSizes,
10941 size_t processedSizesCnt, size_t maxCount, cam_rect_t active_array_size,
10942 uint8_t downscale_factor)
10943{
10944 if (0 == downscale_factor) {
10945 downscale_factor = 1;
10946 }
10947
10948 int32_t min_width = active_array_size.width / downscale_factor;
10949 int32_t min_height = active_array_size.height / downscale_factor;
10950 size_t jpegSizesCnt = 0;
10951 if (processedSizesCnt > maxCount) {
10952 processedSizesCnt = maxCount;
10953 }
10954 for (size_t i = 0; i < processedSizesCnt; i+=2) {
10955 if (processedSizes[i] >= min_width && processedSizes[i+1] >= min_height) {
10956 jpegSizes[jpegSizesCnt] = processedSizes[i];
10957 jpegSizes[jpegSizesCnt+1] = processedSizes[i+1];
10958 jpegSizesCnt += 2;
10959 }
10960 }
10961 return jpegSizesCnt;
10962}
10963
10964/*===========================================================================
10965 * FUNCTION : computeNoiseModelEntryS
10966 *
10967 * DESCRIPTION: function to map a given sensitivity to the S noise
10968 * model parameters in the DNG noise model.
10969 *
10970 * PARAMETERS : sens : the sensor sensitivity
10971 *
10972 * RETURN : S (sensor amplification) noise
10973 *
10974 *==========================================================================*/
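// In the DNG noise model the per-pixel noise variance is approximately S * signal + O; S is
// modeled here as a linear function of sensitivity, clamped at zero:
// S(sens) = gradient_S * sens + offset_S.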
10975double QCamera3HardwareInterface::computeNoiseModelEntryS(int32_t sens) {
10976 double s = gCamCapability[mCameraId]->gradient_S * sens +
10977 gCamCapability[mCameraId]->offset_S;
10978 return ((s < 0.0) ? 0.0 : s);
10979}
10980
10981/*===========================================================================
10982 * FUNCTION : computeNoiseModelEntryO
10983 *
10984 * DESCRIPTION: function to map a given sensitivity to the O noise
10985 * model parameters in the DNG noise model.
10986 *
10987 * PARAMETERS : sens : the sensor sensitivity
10988 *
10989 * RETURN : O (sensor readout) noise
10990 *
10991 *==========================================================================*/
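// O is modeled here as a quadratic function of sensitivity, additionally scaled by the square
// of any digital gain applied beyond the maximum analog sensitivity, clamped at zero:
// O(sens) = gradient_O * sens^2 + offset_O * digital_gain^2.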
10992double QCamera3HardwareInterface::computeNoiseModelEntryO(int32_t sens) {
10993 int32_t max_analog_sens = gCamCapability[mCameraId]->max_analog_sensitivity;
10994 double digital_gain = (1.0 * sens / max_analog_sens) < 1.0 ?
10995 1.0 : (1.0 * sens / max_analog_sens);
10996 double o = gCamCapability[mCameraId]->gradient_O * sens * sens +
10997 gCamCapability[mCameraId]->offset_O * digital_gain * digital_gain;
10998 return ((o < 0.0) ? 0.0 : o);
10999}
11000
11001/*===========================================================================
11002 * FUNCTION : getSensorSensitivity
11003 *
11004 * DESCRIPTION: convert iso_mode to an integer value
11005 *
11006 * PARAMETERS : iso_mode : the iso_mode supported by sensor
11007 *
11008 * RETURN : sensitivity supported by sensor
11009 *
11010 *==========================================================================*/
11011int32_t QCamera3HardwareInterface::getSensorSensitivity(int32_t iso_mode)
11012{
11013 int32_t sensitivity;
11014
11015 switch (iso_mode) {
11016 case CAM_ISO_MODE_100:
11017 sensitivity = 100;
11018 break;
11019 case CAM_ISO_MODE_200:
11020 sensitivity = 200;
11021 break;
11022 case CAM_ISO_MODE_400:
11023 sensitivity = 400;
11024 break;
11025 case CAM_ISO_MODE_800:
11026 sensitivity = 800;
11027 break;
11028 case CAM_ISO_MODE_1600:
11029 sensitivity = 1600;
11030 break;
11031 default:
11032 sensitivity = -1;
11033 break;
11034 }
11035 return sensitivity;
11036}
11037
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011038int QCamera3HardwareInterface::initHdrPlusClientLocked() {
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011039 if (gEaselManagerClient == nullptr) {
11040 gEaselManagerClient = EaselManagerClient::create();
11041 if (gEaselManagerClient == nullptr) {
11042 ALOGE("%s: Failed to create Easel manager client.", __FUNCTION__);
11043 return -ENODEV;
11044 }
11045 }
11046
11047 if (!EaselManagerClientOpened && gEaselManagerClient->isEaselPresentOnDevice()) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011048 // Check if HAL should not power on Easel even if it's present. This is to allow HDR+ tests
11049 // to connect to Easel.
11050 bool doNotpowerOnEasel =
11051 property_get_bool("camera.hdrplus.donotpoweroneasel", false);
11052
11053 if (doNotpowerOnEasel) {
Chien-Yu Chen08309b32017-03-13 17:41:32 -070011054 ALOGI("%s: Easel is present but not powered on.", __FUNCTION__);
11055 return OK;
11056 }
11057
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011058 // If Easel is present, power on Easel and suspend it immediately.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011059 status_t res = gEaselManagerClient->open();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011060 if (res != OK) {
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011061 ALOGE("%s: Opening Easel manager client failed: %s (%d)", __FUNCTION__, strerror(-res),
11062 res);
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011063 return res;
11064 }
11065
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011066 EaselManagerClientOpened = true;
11067
Chien-Yu Chend77a5462017-06-02 18:00:38 -070011068 res = gEaselManagerClient->suspend();
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011069 if (res != OK) {
11070 ALOGE("%s: Suspending Easel failed: %s (%d)", __FUNCTION__, strerror(-res), res);
11071 }
11072
Zhijun Hedaacd8a2017-09-14 12:07:42 -070011073 gEaselBypassOnly = !property_get_bool("persist.camera.hdrplus.enable", false);
Chien-Yu Chen509314b2017-04-07 15:27:55 -070011074 gEaselProfilingEnabled = property_get_bool("persist.camera.hdrplus.profiling", false);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070011075 gEnableMultipleHdrplusOutputs =
11076 property_get_bool("persist.camera.hdrplus.multiple_outputs", false);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011077
11078 // Expose enableZsl key only when HDR+ mode is enabled.
11079 gExposeEnableZslKey = !gEaselBypassOnly;
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080011080 }
11081
11082 return OK;
11083}
11084
Thierry Strudel3d639192016-09-09 11:52:26 -070011085/*===========================================================================
11086 * FUNCTION : getCamInfo
11087 *
11088 * DESCRIPTION: query camera capabilities
11089 *
11090 * PARAMETERS :
11091 * @cameraId : camera Id
11092 * @info : camera info struct to be filled in with camera capabilities
11093 *
11094 * RETURN : int type of status
11095 * NO_ERROR -- success
11096 * none-zero failure code
11097 *==========================================================================*/
11098int QCamera3HardwareInterface::getCamInfo(uint32_t cameraId,
11099 struct camera_info *info)
11100{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011101 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_GET_CAM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070011102 int rc = 0;
11103
11104 pthread_mutex_lock(&gCamLock);
Zhijun Hea557c4c2017-03-16 18:37:53 -070011105
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011106 {
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070011107 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070011108 rc = initHdrPlusClientLocked();
11109 if (rc != OK) {
11110 ALOGE("%s: initHdrPlusClientLocked failed: %s (%d)", __FUNCTION__, strerror(-rc), rc);
11111 pthread_mutex_unlock(&gCamLock);
11112 return rc;
11113 }
Zhijun Hea557c4c2017-03-16 18:37:53 -070011114 }
11115
Thierry Strudel3d639192016-09-09 11:52:26 -070011116 if (NULL == gCamCapability[cameraId]) {
11117 rc = initCapabilities(cameraId);
11118 if (rc < 0) {
11119 pthread_mutex_unlock(&gCamLock);
11120 return rc;
11121 }
11122 }
11123
11124 if (NULL == gStaticMetadata[cameraId]) {
11125 rc = initStaticMetadata(cameraId);
11126 if (rc < 0) {
11127 pthread_mutex_unlock(&gCamLock);
11128 return rc;
11129 }
11130 }
11131
11132 switch(gCamCapability[cameraId]->position) {
11133 case CAM_POSITION_BACK:
11134 case CAM_POSITION_BACK_AUX:
11135 info->facing = CAMERA_FACING_BACK;
11136 break;
11137
11138 case CAM_POSITION_FRONT:
11139 case CAM_POSITION_FRONT_AUX:
11140 info->facing = CAMERA_FACING_FRONT;
11141 break;
11142
11143 default:
11144 LOGE("Unknown position type %d for camera id:%d",
11145 gCamCapability[cameraId]->position, cameraId);
11146 rc = -1;
11147 break;
11148 }
11149
11150
11151 info->orientation = (int)gCamCapability[cameraId]->sensor_mount_angle;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011152#ifndef USE_HAL_3_3
11153 info->device_version = CAMERA_DEVICE_API_VERSION_3_4;
11154#else
Thierry Strudel3d639192016-09-09 11:52:26 -070011155 info->device_version = CAMERA_DEVICE_API_VERSION_3_3;
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011156#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011157 info->static_camera_characteristics = gStaticMetadata[cameraId];
11158
11159 //For now assume both cameras can operate independently.
11160 info->conflicting_devices = NULL;
11161 info->conflicting_devices_length = 0;
11162
11163 //resource cost is 100 * MIN(1.0, m/M),
11164 //where m is throughput requirement with maximum stream configuration
11165 //and M is CPP maximum throughput.
11166 float max_fps = 0.0;
11167 for (uint32_t i = 0;
11168 i < gCamCapability[cameraId]->fps_ranges_tbl_cnt; i++) {
11169 if (max_fps < gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps)
11170 max_fps = gCamCapability[cameraId]->fps_ranges_tbl[i].max_fps;
11171 }
11172 float ratio = 1.0 * MAX_PROCESSED_STREAMS *
11173 gCamCapability[cameraId]->active_array_size.width *
11174 gCamCapability[cameraId]->active_array_size.height * max_fps /
11175 gCamCapability[cameraId]->max_pixel_bandwidth;
11176 info->resource_cost = 100 * MIN(1.0, ratio);
11177 LOGI("camera %d resource cost is %d", cameraId,
11178 info->resource_cost);
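    // Worked example (illustrative numbers only): assuming MAX_PROCESSED_STREAMS is 3, a
    // 4032x3024 active array, a 30 fps maximum and a 1.2e9 pixels/s CPP bandwidth,
    // ratio = 3 * 4032 * 3024 * 30 / 1.2e9 ~= 0.91, so resource_cost is 91.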
11179
11180 pthread_mutex_unlock(&gCamLock);
11181 return rc;
11182}
11183
11184/*===========================================================================
11185 * FUNCTION : translateCapabilityToMetadata
11186 *
11187 * DESCRIPTION: translate the capability into camera_metadata_t
11188 *
11189 * PARAMETERS : type of the request
11190 *
11191 *
11192 * RETURN : success: camera_metadata_t*
11193 * failure: NULL
11194 *
11195 *==========================================================================*/
11196camera_metadata_t* QCamera3HardwareInterface::translateCapabilityToMetadata(int type)
11197{
11198 if (mDefaultMetadata[type] != NULL) {
11199 return mDefaultMetadata[type];
11200 }
11201 //first time we are handling this request
11202 //fill up the metadata structure using the wrapper class
11203 CameraMetadata settings;
11204 //translate from cam_capability_t to camera_metadata_tag_t
11205 static const uint8_t requestType = ANDROID_REQUEST_TYPE_CAPTURE;
11206 settings.update(ANDROID_REQUEST_TYPE, &requestType, 1);
11207 int32_t defaultRequestID = 0;
11208 settings.update(ANDROID_REQUEST_ID, &defaultRequestID, 1);
11209
11210 /* OIS disable */
11211 char ois_prop[PROPERTY_VALUE_MAX];
11212 memset(ois_prop, 0, sizeof(ois_prop));
11213 property_get("persist.camera.ois.disable", ois_prop, "0");
11214 uint8_t ois_disable = (uint8_t)atoi(ois_prop);
11215
11216 /* Force video to use OIS */
11217 char videoOisProp[PROPERTY_VALUE_MAX];
11218 memset(videoOisProp, 0, sizeof(videoOisProp));
11219 property_get("persist.camera.ois.video", videoOisProp, "1");
11220 uint8_t forceVideoOis = (uint8_t)atoi(videoOisProp);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011221
11222 // Hybrid AE enable/disable
11223 char hybrid_ae_prop[PROPERTY_VALUE_MAX];
11224 memset(hybrid_ae_prop, 0, sizeof(hybrid_ae_prop));
11225 property_get("persist.camera.hybrid_ae.enable", hybrid_ae_prop, "0");
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011226 uint8_t hybrid_ae = (uint8_t)atoi(hybrid_ae_prop);
Shuzhen Wang19463d72016-03-08 11:09:52 -080011227
Thierry Strudel3d639192016-09-09 11:52:26 -070011228 uint8_t controlIntent = 0;
11229 uint8_t focusMode;
11230 uint8_t vsMode;
11231 uint8_t optStabMode;
11232 uint8_t cacMode;
11233 uint8_t edge_mode;
11234 uint8_t noise_red_mode;
11235 uint8_t tonemap_mode;
11236 bool highQualityModeEntryAvailable = FALSE;
11237 bool fastModeEntryAvailable = FALSE;
Shuzhen Wang14415f52016-11-16 18:26:18 -080011238 uint8_t histogramEnable = false;
Thierry Strudel3d639192016-09-09 11:52:26 -070011239 vsMode = ANDROID_CONTROL_VIDEO_STABILIZATION_MODE_OFF;
11240 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011241 uint8_t shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_OFF;
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011242 uint8_t trackingAfTrigger = NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER_IDLE;
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011243 uint8_t enableZsl = ANDROID_CONTROL_ENABLE_ZSL_FALSE;
Mansoor Aftabea39eba2017-01-26 14:58:25 -080011244
Thierry Strudel3d639192016-09-09 11:52:26 -070011245 switch (type) {
11246 case CAMERA3_TEMPLATE_PREVIEW:
11247 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_PREVIEW;
11248 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11249 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11250 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11251 edge_mode = ANDROID_EDGE_MODE_FAST;
11252 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11253 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11254 break;
11255 case CAMERA3_TEMPLATE_STILL_CAPTURE:
11256 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_STILL_CAPTURE;
11257 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11258 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11259 edge_mode = ANDROID_EDGE_MODE_HIGH_QUALITY;
11260 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY;
11261 tonemap_mode = ANDROID_TONEMAP_MODE_HIGH_QUALITY;
11262 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11263 // Order of priority for default CAC is HIGH Quality -> FAST -> OFF
11264 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
11265 if (gCamCapability[mCameraId]->aberration_modes[i] ==
11266 CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
11267 highQualityModeEntryAvailable = TRUE;
11268 } else if (gCamCapability[mCameraId]->aberration_modes[i] ==
11269 CAM_COLOR_CORRECTION_ABERRATION_FAST) {
11270 fastModeEntryAvailable = TRUE;
11271 }
11272 }
11273 if (highQualityModeEntryAvailable) {
11274 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY;
11275 } else if (fastModeEntryAvailable) {
11276 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11277 }
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011278 if (CAM_SENSOR_RAW == gCamCapability[mCameraId]->sensor_type.sens_type) {
11279 shadingmap_mode = ANDROID_STATISTICS_LENS_SHADING_MAP_MODE_ON;
11280 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011281 enableZsl = ANDROID_CONTROL_ENABLE_ZSL_TRUE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011282 break;
11283 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11284 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_RECORD;
11285 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11286 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011287 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11288 edge_mode = ANDROID_EDGE_MODE_FAST;
11289 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11290 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11291 if (forceVideoOis)
11292 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11293 break;
11294 case CAMERA3_TEMPLATE_VIDEO_SNAPSHOT:
11295 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_VIDEO_SNAPSHOT;
11296 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_VIDEO;
11297 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
Thierry Strudel3d639192016-09-09 11:52:26 -070011298 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11299 edge_mode = ANDROID_EDGE_MODE_FAST;
11300 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11301 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11302 if (forceVideoOis)
11303 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11304 break;
11305 case CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG:
11306 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_ZERO_SHUTTER_LAG;
11307 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11308 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11309 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11310 edge_mode = ANDROID_EDGE_MODE_ZERO_SHUTTER_LAG;
11311 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_ZERO_SHUTTER_LAG;
11312 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11313 break;
11314 case CAMERA3_TEMPLATE_MANUAL:
11315 edge_mode = ANDROID_EDGE_MODE_FAST;
11316 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11317 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11318 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11319 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_MANUAL;
11320 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11321 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11322 break;
11323 default:
11324 edge_mode = ANDROID_EDGE_MODE_FAST;
11325 noise_red_mode = ANDROID_NOISE_REDUCTION_MODE_FAST;
11326 tonemap_mode = ANDROID_TONEMAP_MODE_FAST;
11327 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_FAST;
11328 controlIntent = ANDROID_CONTROL_CAPTURE_INTENT_CUSTOM;
11329 focusMode = ANDROID_CONTROL_AF_MODE_CONTINUOUS_PICTURE;
11330 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11331 break;
11332 }
Thierry Strudel04e026f2016-10-10 11:27:36 -070011333    // Set CAC to OFF if the underlying device doesn't support it
11334 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
11335 cacMode = ANDROID_COLOR_CORRECTION_ABERRATION_MODE_OFF;
11336 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011337 settings.update(ANDROID_COLOR_CORRECTION_ABERRATION_MODE, &cacMode, 1);
11338 settings.update(ANDROID_CONTROL_CAPTURE_INTENT, &controlIntent, 1);
11339 settings.update(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE, &vsMode, 1);
11340 if (gCamCapability[mCameraId]->supported_focus_modes_cnt == 1) {
11341 focusMode = ANDROID_CONTROL_AF_MODE_OFF;
11342 }
11343 settings.update(ANDROID_CONTROL_AF_MODE, &focusMode, 1);
Shuzhen Wang14415f52016-11-16 18:26:18 -080011344 settings.update(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE, &histogramEnable, 1);
Shuzhen Wangcc386c52017-03-29 09:28:08 -070011345 settings.update(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER, &trackingAfTrigger, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011346
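    // If the sensor advertises exactly one OIS mode, default to that mode; the
    // persist.camera.ois.disable property can still force OIS off.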
11347 if (gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11348 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_ON)
11349 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_ON;
11350 else if ((gCamCapability[mCameraId]->optical_stab_modes_count == 1 &&
11351 gCamCapability[mCameraId]->optical_stab_modes[0] == CAM_OPT_STAB_OFF)
11352 || ois_disable)
11353 optStabMode = ANDROID_LENS_OPTICAL_STABILIZATION_MODE_OFF;
11354 settings.update(ANDROID_LENS_OPTICAL_STABILIZATION_MODE, &optStabMode, 1);
Shuzhen Wang8f66c042016-08-17 14:50:26 -070011355 settings.update(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE, &shadingmap_mode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011356
11357 settings.update(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION,
11358 &gCamCapability[mCameraId]->exposure_compensation_default, 1);
11359
11360 static const uint8_t aeLock = ANDROID_CONTROL_AE_LOCK_OFF;
11361 settings.update(ANDROID_CONTROL_AE_LOCK, &aeLock, 1);
11362
11363 static const uint8_t awbLock = ANDROID_CONTROL_AWB_LOCK_OFF;
11364 settings.update(ANDROID_CONTROL_AWB_LOCK, &awbLock, 1);
11365
11366 static const uint8_t awbMode = ANDROID_CONTROL_AWB_MODE_AUTO;
11367 settings.update(ANDROID_CONTROL_AWB_MODE, &awbMode, 1);
11368
11369 static const uint8_t controlMode = ANDROID_CONTROL_MODE_AUTO;
11370 settings.update(ANDROID_CONTROL_MODE, &controlMode, 1);
11371
11372 static const uint8_t effectMode = ANDROID_CONTROL_EFFECT_MODE_OFF;
11373 settings.update(ANDROID_CONTROL_EFFECT_MODE, &effectMode, 1);
11374
11375 static const uint8_t sceneMode = ANDROID_CONTROL_SCENE_MODE_FACE_PRIORITY;
11376 settings.update(ANDROID_CONTROL_SCENE_MODE, &sceneMode, 1);
11377
11378 static const uint8_t aeMode = ANDROID_CONTROL_AE_MODE_ON;
11379 settings.update(ANDROID_CONTROL_AE_MODE, &aeMode, 1);
11380
11381 /*flash*/
11382 static const uint8_t flashMode = ANDROID_FLASH_MODE_OFF;
11383 settings.update(ANDROID_FLASH_MODE, &flashMode, 1);
11384
11385 static const uint8_t flashFiringLevel = CAM_FLASH_FIRING_LEVEL_4;
11386 settings.update(ANDROID_FLASH_FIRING_POWER,
11387 &flashFiringLevel, 1);
11388
11389 /* lens */
11390 float default_aperture = gCamCapability[mCameraId]->apertures[0];
11391 settings.update(ANDROID_LENS_APERTURE, &default_aperture, 1);
11392
11393 if (gCamCapability[mCameraId]->filter_densities_count) {
11394 float default_filter_density = gCamCapability[mCameraId]->filter_densities[0];
11395 settings.update(ANDROID_LENS_FILTER_DENSITY, &default_filter_density,
11396 gCamCapability[mCameraId]->filter_densities_count);
11397 }
11398
11399 float default_focal_length = gCamCapability[mCameraId]->focal_length;
11400 settings.update(ANDROID_LENS_FOCAL_LENGTH, &default_focal_length, 1);
11401
Thierry Strudel3d639192016-09-09 11:52:26 -070011402 static const uint8_t demosaicMode = ANDROID_DEMOSAIC_MODE_FAST;
11403 settings.update(ANDROID_DEMOSAIC_MODE, &demosaicMode, 1);
11404
11405 static const uint8_t hotpixelMode = ANDROID_HOT_PIXEL_MODE_FAST;
11406 settings.update(ANDROID_HOT_PIXEL_MODE, &hotpixelMode, 1);
11407
11408 static const int32_t testpatternMode = ANDROID_SENSOR_TEST_PATTERN_MODE_OFF;
11409 settings.update(ANDROID_SENSOR_TEST_PATTERN_MODE, &testpatternMode, 1);
11410
11411 /* face detection (default to OFF) */
11412 static const uint8_t faceDetectMode = ANDROID_STATISTICS_FACE_DETECT_MODE_OFF;
11413 settings.update(ANDROID_STATISTICS_FACE_DETECT_MODE, &faceDetectMode, 1);
11414
Thierry Strudel54dc9782017-02-15 12:12:10 -080011415 static const uint8_t histogramMode = QCAMERA3_HISTOGRAM_MODE_OFF;
11416 settings.update(QCAMERA3_HISTOGRAM_MODE, &histogramMode, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011417
11418 static const uint8_t sharpnessMapMode = ANDROID_STATISTICS_SHARPNESS_MAP_MODE_OFF;
11419 settings.update(ANDROID_STATISTICS_SHARPNESS_MAP_MODE, &sharpnessMapMode, 1);
11420
11421 static const uint8_t hotPixelMapMode = ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE_OFF;
11422 settings.update(ANDROID_STATISTICS_HOT_PIXEL_MAP_MODE, &hotPixelMapMode, 1);
11423
Thierry Strudel3d639192016-09-09 11:52:26 -070011424
11425 static const uint8_t blackLevelLock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11426 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blackLevelLock, 1);
11427
11428 /* Exposure time (default to the minimum supported exposure time) */
11429 int64_t default_exposure_time = gCamCapability[mCameraId]->exposure_time_range[0];
11430 settings.update(ANDROID_SENSOR_EXPOSURE_TIME, &default_exposure_time, 1);
11431
11432 /* frame duration */
11433 static const int64_t default_frame_duration = NSEC_PER_33MSEC;
11434 settings.update(ANDROID_SENSOR_FRAME_DURATION, &default_frame_duration, 1);
11435
11436 /* sensitivity */
11437 static const int32_t default_sensitivity = 100;
11438 settings.update(ANDROID_SENSOR_SENSITIVITY, &default_sensitivity, 1);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070011439#ifndef USE_HAL_3_3
11440 static const int32_t default_isp_sensitivity =
11441 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
11442 settings.update(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST, &default_isp_sensitivity, 1);
11443#endif
Thierry Strudel3d639192016-09-09 11:52:26 -070011444
11445 /*edge mode*/
11446 settings.update(ANDROID_EDGE_MODE, &edge_mode, 1);
11447
11448 /*noise reduction mode*/
11449 settings.update(ANDROID_NOISE_REDUCTION_MODE, &noise_red_mode, 1);
11450
11451 /*color correction mode*/
11452 static const uint8_t color_correct_mode = ANDROID_COLOR_CORRECTION_MODE_FAST;
11453 settings.update(ANDROID_COLOR_CORRECTION_MODE, &color_correct_mode, 1);
11454
11455 /*tonemap mode*/
11456 settings.update(ANDROID_TONEMAP_MODE, &tonemap_mode, 1);
11457
11458 int32_t scaler_crop_region[4];
11459 scaler_crop_region[0] = 0;
11460 scaler_crop_region[1] = 0;
11461 scaler_crop_region[2] = gCamCapability[mCameraId]->active_array_size.width;
11462 scaler_crop_region[3] = gCamCapability[mCameraId]->active_array_size.height;
11463 settings.update(ANDROID_SCALER_CROP_REGION, scaler_crop_region, 4);
11464
11465 static const uint8_t antibanding_mode = ANDROID_CONTROL_AE_ANTIBANDING_MODE_AUTO;
11466 settings.update(ANDROID_CONTROL_AE_ANTIBANDING_MODE, &antibanding_mode, 1);
11467
11468 /*focus distance*/
11469 float focus_distance = 0.0;
11470 settings.update(ANDROID_LENS_FOCUS_DISTANCE, &focus_distance, 1);
11471
11472 /*target fps range: use maximum range for picture, and maximum fixed range for video*/
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011473 /* Restrict template max_fps to 30 */
Thierry Strudel3d639192016-09-09 11:52:26 -070011474 float max_range = 0.0;
11475 float max_fixed_fps = 0.0;
11476 int32_t fps_range[2] = {0, 0};
11477 for (uint32_t i = 0; i < gCamCapability[mCameraId]->fps_ranges_tbl_cnt;
11478 i++) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080011479 if (gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps >
11480 TEMPLATE_MAX_PREVIEW_FPS) {
11481 continue;
11482 }
Thierry Strudel3d639192016-09-09 11:52:26 -070011483 float range = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps -
11484 gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11485 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11486 type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11487 type == CAMERA3_TEMPLATE_ZERO_SHUTTER_LAG) {
11488 if (range > max_range) {
11489 fps_range[0] =
11490 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11491 fps_range[1] =
11492 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11493 max_range = range;
11494 }
11495 } else {
11496 if (range < 0.01 && max_fixed_fps <
11497 gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps) {
11498 fps_range[0] =
11499 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].min_fps;
11500 fps_range[1] =
11501 (int32_t)gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11502 max_fixed_fps = gCamCapability[mCameraId]->fps_ranges_tbl[i].max_fps;
11503 }
11504 }
11505 }
11506 settings.update(ANDROID_CONTROL_AE_TARGET_FPS_RANGE, fps_range, 2);
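    /* Illustrative walk-through of the selection above (the capability table
     * here is assumed, not taken from any specific sensor): with
     * fps_ranges_tbl = {[15,30], [30,30], [7.5,30], [60,60]} and
     * TEMPLATE_MAX_PREVIEW_FPS == 30, the [60,60] entry is skipped. The
     * PREVIEW/STILL_CAPTURE/ZSL templates pick the widest remaining range,
     * [7.5,30]; the other (video) templates pick the highest fixed range,
     * [30,30]. */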
11507
11508 /*precapture trigger*/
11509 uint8_t precapture_trigger = ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER_IDLE;
11510 settings.update(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER, &precapture_trigger, 1);
11511
11512 /*af trigger*/
11513 uint8_t af_trigger = ANDROID_CONTROL_AF_TRIGGER_IDLE;
11514 settings.update(ANDROID_CONTROL_AF_TRIGGER, &af_trigger, 1);
11515
11516 /* ae & af regions */
11517 int32_t active_region[] = {
11518 gCamCapability[mCameraId]->active_array_size.left,
11519 gCamCapability[mCameraId]->active_array_size.top,
11520 gCamCapability[mCameraId]->active_array_size.left +
11521 gCamCapability[mCameraId]->active_array_size.width,
11522 gCamCapability[mCameraId]->active_array_size.top +
11523 gCamCapability[mCameraId]->active_array_size.height,
11524 0};
11525 settings.update(ANDROID_CONTROL_AE_REGIONS, active_region,
11526 sizeof(active_region) / sizeof(active_region[0]));
11527 settings.update(ANDROID_CONTROL_AF_REGIONS, active_region,
11528 sizeof(active_region) / sizeof(active_region[0]));
11529
11530 /* black level lock */
11531 uint8_t blacklevel_lock = ANDROID_BLACK_LEVEL_LOCK_OFF;
11532 settings.update(ANDROID_BLACK_LEVEL_LOCK, &blacklevel_lock, 1);
11533
Thierry Strudel3d639192016-09-09 11:52:26 -070011534 //special defaults for manual template
11535 if (type == CAMERA3_TEMPLATE_MANUAL) {
11536 static const uint8_t manualControlMode = ANDROID_CONTROL_MODE_OFF;
11537 settings.update(ANDROID_CONTROL_MODE, &manualControlMode, 1);
11538
11539 static const uint8_t manualFocusMode = ANDROID_CONTROL_AF_MODE_OFF;
11540 settings.update(ANDROID_CONTROL_AF_MODE, &manualFocusMode, 1);
11541
11542 static const uint8_t manualAeMode = ANDROID_CONTROL_AE_MODE_OFF;
11543 settings.update(ANDROID_CONTROL_AE_MODE, &manualAeMode, 1);
11544
11545 static const uint8_t manualAwbMode = ANDROID_CONTROL_AWB_MODE_OFF;
11546 settings.update(ANDROID_CONTROL_AWB_MODE, &manualAwbMode, 1);
11547
11548 static const uint8_t manualTonemapMode = ANDROID_TONEMAP_MODE_FAST;
11549 settings.update(ANDROID_TONEMAP_MODE, &manualTonemapMode, 1);
11550
11551 static const uint8_t manualColorCorrectMode = ANDROID_COLOR_CORRECTION_MODE_TRANSFORM_MATRIX;
11552 settings.update(ANDROID_COLOR_CORRECTION_MODE, &manualColorCorrectMode, 1);
11553 }
11554
11555
11556 /* TNR
11557 * This is where we decide for which templates TNR is enabled by default.
11558 * TNR is turned on if either the preview or the video stream requires it.
11559 * This is not to be confused with per-stream linking; that decision is still
11560 * made on a per-session basis and is handled as part of stream configuration.
11561 */
11562 uint8_t tnr_enable = 0;
11563
11564 if (m_bTnrPreview || m_bTnrVideo) {
11565
11566 switch (type) {
11567 case CAMERA3_TEMPLATE_VIDEO_RECORD:
11568 tnr_enable = 1;
11569 break;
11570
11571 default:
11572 tnr_enable = 0;
11573 break;
11574 }
11575
11576 int32_t tnr_process_type = (int32_t)getTemporalDenoiseProcessPlate();
11577 settings.update(QCAMERA3_TEMPORAL_DENOISE_ENABLE, &tnr_enable, 1);
11578 settings.update(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE, &tnr_process_type, 1);
11579
11580 LOGD("TNR:%d with process plate %d for template:%d",
11581 tnr_enable, tnr_process_type, type);
11582 }
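    /* Example of the effect above: when TNR is enabled for video (m_bTnrVideo)
     * or preview (m_bTnrPreview), a CAMERA3_TEMPLATE_VIDEO_RECORD default
     * template is published with QCAMERA3_TEMPORAL_DENOISE_ENABLE = 1, while
     * every other template gets 0. */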
11583
11584 //Update Link tags to default
Shuzhen Wang920ea402017-05-03 08:49:39 -070011585 uint8_t sync_type = CAM_TYPE_STANDALONE;
Thierry Strudel3d639192016-09-09 11:52:26 -070011586 settings.update(QCAMERA3_DUALCAM_LINK_ENABLE, &sync_type, 1);
11587
Chien-Yu Chena3bbdc02017-05-05 11:31:47 -070011588 uint8_t is_main = 1;
Thierry Strudel3d639192016-09-09 11:52:26 -070011589 settings.update(QCAMERA3_DUALCAM_LINK_IS_MAIN, &is_main, 1);
11590
Shuzhen Wang920ea402017-05-03 08:49:39 -070011591 uint8_t related_camera_id = mCameraId;
11592 settings.update(QCAMERA3_DUALCAM_LINK_RELATED_CAMERA_ID, &related_camera_id, 1);
Thierry Strudel3d639192016-09-09 11:52:26 -070011593
11594 /* CDS default */
11595 char prop[PROPERTY_VALUE_MAX];
11596 memset(prop, 0, sizeof(prop));
11597 property_get("persist.camera.CDS", prop, "Auto");
11598 cam_cds_mode_type_t cds_mode = CAM_CDS_MODE_AUTO;
11599 cds_mode = lookupProp(CDS_MAP, METADATA_MAP_SIZE(CDS_MAP), prop);
11600 if (CAM_CDS_MODE_MAX == cds_mode) {
11601 cds_mode = CAM_CDS_MODE_AUTO;
11602 }
11603
11604 /* Disabling CDS in templates which have TNR enabled*/
11605 if (tnr_enable)
11606 cds_mode = CAM_CDS_MODE_OFF;
11607
11608 int32_t mode = cds_mode;
11609 settings.update(QCAMERA3_CDS_MODE, &mode, 1);
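    /* Debugging sketch (assumed workflow; the exact accepted strings come from
     * CDS_MAP): the default above can be overridden from the shell, e.g.
     *   adb shell setprop persist.camera.CDS Off
     * The CAM_CDS_MODE_MAX check above guards against unmapped values by
     * falling back to CAM_CDS_MODE_AUTO. */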
Thierry Strudel04e026f2016-10-10 11:27:36 -070011610
Thierry Strudel269c81a2016-10-12 12:13:59 -070011611 /* Manual Convergence AEC Speed is disabled by default*/
11612 float default_aec_speed = 0;
11613 settings.update(QCAMERA3_AEC_CONVERGENCE_SPEED, &default_aec_speed, 1);
11614
11615 /* Manual Convergence AWB Speed is disabled by default*/
11616 float default_awb_speed = 0;
11617 settings.update(QCAMERA3_AWB_CONVERGENCE_SPEED, &default_awb_speed, 1);
11618
Thierry Strudel295a0ca2016-11-03 18:38:47 -070011619 // Set instant AEC to normal convergence by default
11620 int32_t instant_aec_mode = (int32_t)QCAMERA3_INSTANT_AEC_NORMAL_CONVERGENCE;
11621 settings.update(QCAMERA3_INSTANT_AEC_MODE, &instant_aec_mode, 1);
11622
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011623 if (gExposeEnableZslKey) {
11624 settings.update(ANDROID_CONTROL_ENABLE_ZSL, &enableZsl, 1);
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070011625 int32_t postview = 0;
11626 settings.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW, &postview, 1);
Chien-Yu Chenb0981e32017-08-28 19:27:35 -070011627 int32_t continuousZslCapture = 0;
11628 settings.update(NEXUS_EXPERIMENTAL_2017_CONTINUOUS_ZSL_CAPTURE, &continuousZslCapture, 1);
Chien-Yu Chenfadf40e2017-09-15 14:33:57 -070011629 // Disable HDR+ for templates other than CAMERA3_TEMPLATE_STILL_CAPTURE and
11630 // CAMERA3_TEMPLATE_PREVIEW.
11631 int32_t disableHdrplus = (type == CAMERA3_TEMPLATE_STILL_CAPTURE ||
11632 type == CAMERA3_TEMPLATE_PREVIEW) ? 0 : 1;
Chien-Yu Chenec328c82017-08-30 16:41:08 -070011633 settings.update(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS, &disableHdrplus, 1);
11634
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011635 // Set hybrid_ae tag in PREVIEW and STILL_CAPTURE templates to 1 so that
11636 // hybrid ae is enabled for 3rd party app HDR+.
11637 if (type == CAMERA3_TEMPLATE_PREVIEW ||
11638 type == CAMERA3_TEMPLATE_STILL_CAPTURE) {
11639 hybrid_ae = 1;
11640 }
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011641 }
Shuzhen Wang77b049a2017-08-30 12:24:36 -070011642 /* hybrid ae */
11643 settings.update(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE, &hybrid_ae, 1);
Chien-Yu Chen66ec22a2017-04-13 18:00:36 -070011644
Thierry Strudel3d639192016-09-09 11:52:26 -070011645 mDefaultMetadata[type] = settings.release();
11646
11647 return mDefaultMetadata[type];
11648}
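/* Usage sketch (hedged; the call below is the standard camera3 HAL entry point,
 * not something defined in this file): the framework fetches these defaults via
 * camera3_device_ops, and the result is the cached mDefaultMetadata[type]
 * above, so the caller must not free it.
 *
 *   camera3_device_t *dev = ...;  // an opened camera device
 *   const camera_metadata_t *defaults =
 *           dev->ops->construct_default_request_settings(dev,
 *                   CAMERA3_TEMPLATE_PREVIEW);
 */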
11649
11650/*===========================================================================
Emilian Peev30522a12017-08-03 14:36:33 +010011651 * FUNCTION : getExpectedFrameDuration
11652 *
11653 * DESCRIPTION: Extract the expected frame duration as the maximum of the
11654 * exposure time and the frame duration in the request
11655 *
11656 * PARAMETERS :
11657 * @request : request settings
11658 * @frameDuration : The maximum frame duration in nanoseconds
11659 *
11660 * RETURN : None
11661 *==========================================================================*/
11662void QCamera3HardwareInterface::getExpectedFrameDuration(
11663 const camera_metadata_t *request, nsecs_t *frameDuration /*out*/) {
11664 if (nullptr == frameDuration) {
11665 return;
11666 }
11667
11668 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11669 find_camera_metadata_ro_entry(request,
11670 ANDROID_SENSOR_EXPOSURE_TIME,
11671 &e);
11672 if (e.count > 0) {
11673 *frameDuration = e.data.i64[0];
11674 }
11675 find_camera_metadata_ro_entry(request,
11676 ANDROID_SENSOR_FRAME_DURATION,
11677 &e);
11678 if (e.count > 0) {
11679 *frameDuration = std::max(e.data.i64[0], *frameDuration);
11680 }
11681}
11682
11683/*===========================================================================
11684 * FUNCTION : calculateMaxExpectedDuration
11685 *
11686 * DESCRIPTION: Calculate the expected frame duration in nanoseconds given the
11687 * current camera settings.
11688 *
11689 * PARAMETERS :
11690 * @request : request settings
11691 *
11692 * RETURN : Expected frame duration in nanoseconds.
11693 *==========================================================================*/
11694nsecs_t QCamera3HardwareInterface::calculateMaxExpectedDuration(
11695 const camera_metadata_t *request) {
11696 nsecs_t maxExpectedDuration = kDefaultExpectedDuration;
11697 camera_metadata_ro_entry_t e = camera_metadata_ro_entry_t();
11698 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_MODE, &e);
11699 if (e.count == 0) {
11700 return maxExpectedDuration;
11701 }
11702
11703 if (e.data.u8[0] == ANDROID_CONTROL_MODE_OFF) {
11704 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11705 }
11706
11707 if (e.data.u8[0] != ANDROID_CONTROL_MODE_AUTO) {
11708 return maxExpectedDuration;
11709 }
11710
11711 find_camera_metadata_ro_entry(request, ANDROID_CONTROL_AE_MODE, &e);
11712 if (e.count == 0) {
11713 return maxExpectedDuration;
11714 }
11715
11716 switch (e.data.u8[0]) {
11717 case ANDROID_CONTROL_AE_MODE_OFF:
11718 getExpectedFrameDuration(request, &maxExpectedDuration /*out*/);
11719 break;
11720 default:
11721 find_camera_metadata_ro_entry(request,
11722 ANDROID_CONTROL_AE_TARGET_FPS_RANGE,
11723 &e);
11724 if (e.count > 1) {
11725 maxExpectedDuration = 1e9 / e.data.i32[0];
11726 }
11727 break;
11728 }
11729
11730 return maxExpectedDuration;
11731}
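/* Worked example (illustrative values): with ANDROID_CONTROL_MODE == AUTO and
 * ANDROID_CONTROL_AE_MODE == OFF, an exposure time of 50ms and a frame duration
 * of 33ms yield max(50ms, 33ms) = 50ms. With AE enabled and a target fps range
 * of [15, 30], the expected duration is bounded by the minimum fps:
 * 1e9 / 15 = ~66.7ms. */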
11732
11733/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070011734 * FUNCTION : setFrameParameters
11735 *
11736 * DESCRIPTION: set parameters per frame as requested in the metadata from
11737 * framework
11738 *
11739 * PARAMETERS :
11740 * @request : request that needs to be serviced
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011741 * @streamsArray : Stream ID of all the requested streams
Thierry Strudel3d639192016-09-09 11:52:26 -070011742 * @blob_request: Whether this request is a blob request or not
11743 *
11744 * RETURN : success: NO_ERROR
11745 * failure:
11746 *==========================================================================*/
11747int QCamera3HardwareInterface::setFrameParameters(
11748 camera3_capture_request_t *request,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011749 cam_stream_ID_t streamsArray,
Thierry Strudel3d639192016-09-09 11:52:26 -070011750 int blob_request,
11751 uint32_t snapshotStreamId)
11752{
11753 /*translate from camera_metadata_t type to parm_type_t*/
11754 int rc = 0;
11755 int32_t hal_version = CAM_HAL_V3;
11756
11757 clear_metadata_buffer(mParameters);
11758 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_HAL_VERSION, hal_version)) {
11759 LOGE("Failed to set hal version in the parameters");
11760 return BAD_VALUE;
11761 }
11762
11763 /*we need to update the frame number in the parameters*/
11764 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_FRAME_NUMBER,
11765 request->frame_number)) {
11766 LOGE("Failed to set the frame number in the parameters");
11767 return BAD_VALUE;
11768 }
11769
11770 /* Update stream id of all the requested buffers */
Thierry Strudelc2ee3302016-11-17 12:33:12 -080011771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_META_STREAM_ID, streamsArray)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070011772 LOGE("Failed to set stream type mask in the parameters");
11773 return BAD_VALUE;
11774 }
11775
11776 if (mUpdateDebugLevel) {
11777 uint32_t dummyDebugLevel = 0;
11778 /* The value of dummyDebugLevel is irrelevant. On
11779 * CAM_INTF_PARM_UPDATE_DEBUG_LEVEL the debug property is read again */
11780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_UPDATE_DEBUG_LEVEL,
11781 dummyDebugLevel)) {
11782 LOGE("Failed to set UPDATE_DEBUG_LEVEL");
11783 return BAD_VALUE;
11784 }
11785 mUpdateDebugLevel = false;
11786 }
11787
11788 if(request->settings != NULL){
Emilian Peev30522a12017-08-03 14:36:33 +010011789 mExpectedFrameDuration = calculateMaxExpectedDuration(request->settings);
Thierry Strudel3d639192016-09-09 11:52:26 -070011790 rc = translateToHalMetadata(request, mParameters, snapshotStreamId);
11791 if (blob_request)
11792 memcpy(mPrevParameters, mParameters, sizeof(metadata_buffer_t));
11793 }
11794
11795 return rc;
11796}
11797
11798/*===========================================================================
11799 * FUNCTION : setReprocParameters
11800 *
11801 * DESCRIPTION: Translate frameworks metadata to HAL metadata structure, and
11802 * return it.
11803 *
11804 * PARAMETERS :
11805 * @request : request that needs to be serviced
11806 *
11807 * RETURN : success: NO_ERROR
11808 * failure:
11809 *==========================================================================*/
11810int32_t QCamera3HardwareInterface::setReprocParameters(
11811 camera3_capture_request_t *request, metadata_buffer_t *reprocParam,
11812 uint32_t snapshotStreamId)
11813{
11814 /*translate from camera_metadata_t type to parm_type_t*/
11815 int rc = 0;
11816
11817 if (NULL == request->settings){
11818 LOGE("Reprocess settings cannot be NULL");
11819 return BAD_VALUE;
11820 }
11821
11822 if (NULL == reprocParam) {
11823 LOGE("Invalid reprocessing metadata buffer");
11824 return BAD_VALUE;
11825 }
11826 clear_metadata_buffer(reprocParam);
11827
11828 /*we need to update the frame number in the parameters*/
11829 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FRAME_NUMBER,
11830 request->frame_number)) {
11831 LOGE("Failed to set the frame number in the parameters");
11832 return BAD_VALUE;
11833 }
11834
11835 rc = translateToHalMetadata(request, reprocParam, snapshotStreamId);
11836 if (rc < 0) {
11837 LOGE("Failed to translate reproc request");
11838 return rc;
11839 }
11840
11841 CameraMetadata frame_settings;
11842 frame_settings = request->settings;
11843 if (frame_settings.exists(QCAMERA3_CROP_COUNT_REPROCESS) &&
11844 frame_settings.exists(QCAMERA3_CROP_REPROCESS)) {
11845 int32_t *crop_count =
11846 frame_settings.find(QCAMERA3_CROP_COUNT_REPROCESS).data.i32;
11847 int32_t *crop_data =
11848 frame_settings.find(QCAMERA3_CROP_REPROCESS).data.i32;
11849 int32_t *roi_map =
11850 frame_settings.find(QCAMERA3_CROP_ROI_MAP_REPROCESS).data.i32;
11851 if ((0 < *crop_count) && (*crop_count < MAX_NUM_STREAMS)) {
11852 cam_crop_data_t crop_meta;
11853 memset(&crop_meta, 0, sizeof(cam_crop_data_t));
11854 crop_meta.num_of_streams = 1;
11855 crop_meta.crop_info[0].crop.left = crop_data[0];
11856 crop_meta.crop_info[0].crop.top = crop_data[1];
11857 crop_meta.crop_info[0].crop.width = crop_data[2];
11858 crop_meta.crop_info[0].crop.height = crop_data[3];
11859
11860 crop_meta.crop_info[0].roi_map.left =
11861 roi_map[0];
11862 crop_meta.crop_info[0].roi_map.top =
11863 roi_map[1];
11864 crop_meta.crop_info[0].roi_map.width =
11865 roi_map[2];
11866 crop_meta.crop_info[0].roi_map.height =
11867 roi_map[3];
11868
11869 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_CROP_DATA, crop_meta)) {
11870 rc = BAD_VALUE;
11871 }
11872 LOGD("Found reprocess crop data for stream %p %dx%d, %dx%d",
11873 request->input_buffer->stream,
11874 crop_meta.crop_info[0].crop.left,
11875 crop_meta.crop_info[0].crop.top,
11876 crop_meta.crop_info[0].crop.width,
11877 crop_meta.crop_info[0].crop.height);
11878 LOGD("Found reprocess roi map data for stream %p %dx%d, %dx%d",
11879 request->input_buffer->stream,
11880 crop_meta.crop_info[0].roi_map.left,
11881 crop_meta.crop_info[0].roi_map.top,
11882 crop_meta.crop_info[0].roi_map.width,
11883 crop_meta.crop_info[0].roi_map.height);
11884 } else {
11885 LOGE("Invalid reprocess crop count %d!", *crop_count);
11886 }
11887 } else {
11888 LOGE("No crop data from matching output stream");
11889 }
11890
11891 /* These settings are not needed for regular requests so handle them specially for
11892 reprocess requests; information needed for EXIF tags */
11893 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
11894 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
11895 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11896 if (NAME_NOT_FOUND != val) {
11897 uint32_t flashMode = (uint32_t)val;
11898 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_MODE, flashMode)) {
11899 rc = BAD_VALUE;
11900 }
11901 } else {
11902 LOGE("Could not map fwk flash mode %d to correct hal flash mode",
11903 frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
11904 }
11905 } else {
11906 LOGH("No flash mode in reprocess settings");
11907 }
11908
11909 if (frame_settings.exists(ANDROID_FLASH_STATE)) {
11910 int32_t flashState = (int32_t)frame_settings.find(ANDROID_FLASH_STATE).data.u8[0];
11911 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_FLASH_STATE, flashState)) {
11912 rc = BAD_VALUE;
11913 }
11914 } else {
11915 LOGH("No flash state in reprocess settings");
11916 }
11917
11918 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS)) {
11919 uint8_t *reprocessFlags =
11920 frame_settings.find(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS).data.u8;
11921 if (ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_REPROCESS_FLAGS,
11922 *reprocessFlags)) {
11923 rc = BAD_VALUE;
11924 }
11925 }
11926
Thierry Strudel54dc9782017-02-15 12:12:10 -080011927 // Add exif debug data to internal metadata
11928 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB)) {
11929 mm_jpeg_debug_exif_params_t *debug_params =
11930 (mm_jpeg_debug_exif_params_t *)frame_settings.find
11931 (QCAMERA3_HAL_PRIVATEDATA_EXIF_DEBUG_DATA_BLOB).data.u8;
11932 // AE
11933 if (debug_params->ae_debug_params_valid == TRUE) {
11934 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AE,
11935 debug_params->ae_debug_params);
11936 }
11937 // AWB
11938 if (debug_params->awb_debug_params_valid == TRUE) {
11939 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AWB,
11940 debug_params->awb_debug_params);
11941 }
11942 // AF
11943 if (debug_params->af_debug_params_valid == TRUE) {
11944 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_AF,
11945 debug_params->af_debug_params);
11946 }
11947 // ASD
11948 if (debug_params->asd_debug_params_valid == TRUE) {
11949 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_ASD,
11950 debug_params->asd_debug_params);
11951 }
11952 // Stats
11953 if (debug_params->stats_debug_params_valid == TRUE) {
11954 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_STATS,
11955 debug_params->stats_debug_params);
11956 }
11957 // BE Stats
11958 if (debug_params->bestats_debug_params_valid == TRUE) {
11959 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BESTATS,
11960 debug_params->bestats_debug_params);
11961 }
11962 // BHIST
11963 if (debug_params->bhist_debug_params_valid == TRUE) {
11964 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_BHIST,
11965 debug_params->bhist_debug_params);
11966 }
11967 // 3A Tuning
11968 if (debug_params->q3a_tuning_debug_params_valid == TRUE) {
11969 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_EXIF_DEBUG_3A_TUNING,
11970 debug_params->q3a_tuning_debug_params);
11971 }
11972 }
11973
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011974 // Add metadata which reprocess needs
11975 if (frame_settings.exists(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB)) {
11976 cam_reprocess_info_t *repro_info =
11977 (cam_reprocess_info_t *)frame_settings.find
11978 (QCAMERA3_HAL_PRIVATEDATA_REPROCESS_DATA_BLOB).data.u8;
Thierry Strudel3d639192016-09-09 11:52:26 -070011979 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_SENSOR,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011980 repro_info->sensor_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011981 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CAMIF,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011982 repro_info->camif_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011983 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_ISP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011984 repro_info->isp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011985 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_SNAP_CROP_INFO_CPP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011986 repro_info->cpp_crop_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070011987 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_FOCAL_LENGTH_RATIO,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011988 repro_info->af_focal_length_ratio);
Thierry Strudel3d639192016-09-09 11:52:26 -070011989 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_FLIP,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070011990 repro_info->pipeline_flip);
11991 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_AF_ROI,
11992 repro_info->af_roi);
11993 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_META_IMG_DYN_FEAT,
11994 repro_info->dyn_mask);
Thierry Strudel3d639192016-09-09 11:52:26 -070011995 /* If ANDROID_JPEG_ORIENTATION is present in the frame settings, then
11996 CAM_INTF_PARM_ROTATION metadata has already been added in
11997 translateToHalMetadata and the HAL needs to keep this new rotation
11998 metadata. Otherwise, the old rotation info saved in the vendor tag
11999 is used */
12000 IF_META_AVAILABLE(cam_rotation_info_t, rotationInfo,
12001 CAM_INTF_PARM_ROTATION, reprocParam) {
12002 LOGD("CAM_INTF_PARM_ROTATION metadata is added in translateToHalMetadata");
12003 } else {
12004 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_ROTATION,
Thierry Strudelcca4d9c2016-10-20 08:25:53 -070012005 repro_info->rotation_info);
Thierry Strudel3d639192016-09-09 11:52:26 -070012006 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012007 }
12008
12009 /* Add additional JPEG cropping information. The app adds QCAMERA3_JPEG_ENCODE_CROP_RECT
12010 to ask for cropping and uses the ROI for downscale/upscale during HW JPEG encoding.
12011 roi.width and roi.height would be the final JPEG size.
12012 For now, the HAL only checks this for reprocess requests */
12013 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ENABLE) &&
12014 frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_RECT)) {
12015 uint8_t *enable =
12016 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ENABLE).data.u8;
12017 if (*enable == TRUE) {
12018 int32_t *crop_data =
12019 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_RECT).data.i32;
12020 cam_stream_crop_info_t crop_meta;
12021 memset(&crop_meta, 0, sizeof(cam_stream_crop_info_t));
12022 crop_meta.stream_id = 0;
12023 crop_meta.crop.left = crop_data[0];
12024 crop_meta.crop.top = crop_data[1];
12025 crop_meta.crop.width = crop_data[2];
12026 crop_meta.crop.height = crop_data[3];
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012027 // The JPEG crop roi should match cpp output size
12028 IF_META_AVAILABLE(cam_stream_crop_info_t, cpp_crop,
12029 CAM_INTF_META_SNAP_CROP_INFO_CPP, reprocParam) {
12030 crop_meta.roi_map.left = 0;
12031 crop_meta.roi_map.top = 0;
12032 crop_meta.roi_map.width = cpp_crop->crop.width;
12033 crop_meta.roi_map.height = cpp_crop->crop.height;
Thierry Strudel3d639192016-09-09 11:52:26 -070012034 }
12035 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_ENCODE_CROP,
12036 crop_meta);
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012037 LOGH("Add JPEG encode crop left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012038 crop_meta.crop.left, crop_meta.crop.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012039 crop_meta.crop.width, crop_meta.crop.height, mCameraId);
12040 LOGH("Add JPEG encode crop ROI left %d, top %d, width %d, height %d, mCameraId %d",
Thierry Strudel3d639192016-09-09 11:52:26 -070012041 crop_meta.roi_map.left, crop_meta.roi_map.top,
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012042 crop_meta.roi_map.width, crop_meta.roi_map.height, mCameraId);
12043
12044 // Add JPEG scale information
12045 cam_dimension_t scale_dim;
12046 memset(&scale_dim, 0, sizeof(cam_dimension_t));
12047 if (frame_settings.exists(QCAMERA3_JPEG_ENCODE_CROP_ROI)) {
12048 int32_t *roi =
12049 frame_settings.find(QCAMERA3_JPEG_ENCODE_CROP_ROI).data.i32;
12050 scale_dim.width = roi[2];
12051 scale_dim.height = roi[3];
12052 ADD_SET_PARAM_ENTRY_TO_BATCH(reprocParam, CAM_INTF_PARM_JPEG_SCALE_DIMENSION,
12053 scale_dim);
12054 LOGH("Add JPEG encode scale width %d, height %d, mCameraId %d",
12055 scale_dim.width, scale_dim.height, mCameraId);
12056 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012057 }
12058 }
12059
12060 return rc;
12061}
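/* Illustrative caller-side sketch of the JPEG crop vendor tags consumed by
 * setReprocParameters() above (tag names come from QCamera3VendorTags; the
 * numeric values are made up and reprocSettings is a hypothetical
 * CameraMetadata attached to the reprocess request):
 *
 *   uint8_t enable = 1;
 *   int32_t cropRect[4] = {0, 0, 2000, 1500};  // left, top, width, height
 *   int32_t cropRoi[4]  = {0, 0, 1280, 960};   // [2]/[3] = final JPEG size
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ENABLE, &enable, 1);
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_RECT, cropRect, 4);
 *   reprocSettings.update(QCAMERA3_JPEG_ENCODE_CROP_ROI, cropRoi, 4);
 */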
12062
12063/*===========================================================================
12064 * FUNCTION : saveRequestSettings
12065 *
12066 * DESCRIPTION: Add any settings that might have changed to the request settings
12067 * and save the settings to be applied on the frame
12068 *
12069 * PARAMETERS :
12070 * @jpegMetadata : the extracted and/or modified jpeg metadata
12071 * @request : request with initial settings
12072 *
12073 * RETURN :
12074 * camera_metadata_t* : pointer to the saved request settings
12075 *==========================================================================*/
12076camera_metadata_t* QCamera3HardwareInterface::saveRequestSettings(
12077 const CameraMetadata &jpegMetadata,
12078 camera3_capture_request_t *request)
12079{
12080 camera_metadata_t *resultMetadata;
12081 CameraMetadata camMetadata;
12082 camMetadata = request->settings;
12083
12084 if (jpegMetadata.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
12085 int32_t thumbnail_size[2];
12086 thumbnail_size[0] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
12087 thumbnail_size[1] = jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
12088 camMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, thumbnail_size,
12089 jpegMetadata.find(ANDROID_JPEG_THUMBNAIL_SIZE).count);
12090 }
12091
12092 if (request->input_buffer != NULL) {
12093 uint8_t reprocessFlags = 1;
12094 camMetadata.update(QCAMERA3_HAL_PRIVATEDATA_REPROCESS_FLAGS,
12095 (uint8_t*)&reprocessFlags,
12096 sizeof(reprocessFlags));
12097 }
12098
12099 resultMetadata = camMetadata.release();
12100 return resultMetadata;
12101}
12102
12103/*===========================================================================
12104 * FUNCTION : setHalFpsRange
12105 *
12106 * DESCRIPTION: set FPS range parameter
12107 *
12108 *
12109 * PARAMETERS :
12110 * @settings : Metadata from framework
12111 * @hal_metadata: Metadata buffer
12112 *
12113 *
12114 * RETURN : success: NO_ERROR
12115 * failure:
12116 *==========================================================================*/
12117int32_t QCamera3HardwareInterface::setHalFpsRange(const CameraMetadata &settings,
12118 metadata_buffer_t *hal_metadata)
12119{
12120 int32_t rc = NO_ERROR;
12121 cam_fps_range_t fps_range;
12122 fps_range.min_fps = (float)
12123 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[0];
12124 fps_range.max_fps = (float)
12125 settings.find(ANDROID_CONTROL_AE_TARGET_FPS_RANGE).data.i32[1];
12126 fps_range.video_min_fps = fps_range.min_fps;
12127 fps_range.video_max_fps = fps_range.max_fps;
12128
12129 LOGD("aeTargetFpsRange fps: [%f %f]",
12130 fps_range.min_fps, fps_range.max_fps);
12131 /* In CONSTRAINED_HFR_MODE, sensor_fps is derived from aeTargetFpsRange as
12132 * follows:
12133 * ---------------------------------------------------------------|
12134 * Video stream is absent in configure_streams |
12135 * (Camcorder preview before the first video record |
12136 * ---------------------------------------------------------------|
12137 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12138 * | | | vid_min/max_fps|
12139 * ---------------------------------------------------------------|
12140 * NO | [ 30, 240] | 240 | [240, 240] |
12141 * |-------------|-------------|----------------|
12142 * | [240, 240] | 240 | [240, 240] |
12143 * ---------------------------------------------------------------|
12144 * Video stream is present in configure_streams |
12145 * ---------------------------------------------------------------|
12146 * vid_buf_requested | aeTgtFpsRng | snsrFpsMode | sensorFpsRange |
12147 * | | | vid_min/max_fps|
12148 * ---------------------------------------------------------------|
12149 * NO | [ 30, 240] | 240 | [240, 240] |
12150 * (camcorder prev |-------------|-------------|----------------|
12151 * after video rec | [240, 240] | 240 | [240, 240] |
12152 * is stopped) | | | |
12153 * ---------------------------------------------------------------|
12154 * YES | [ 30, 240] | 240 | [240, 240] |
12155 * |-------------|-------------|----------------|
12156 * | [240, 240] | 240 | [240, 240] |
12157 * ---------------------------------------------------------------|
12158 * When Video stream is absent in configure_streams,
12159 * preview fps = sensor_fps / batchsize
12160 * Eg: for 240fps at batchSize 4, preview = 60fps
12161 * for 120fps at batchSize 4, preview = 30fps
12162 *
12163 * When video stream is present in configure_streams, preview fps is as per
12164 * the ratio of preview buffers to video buffers requested in process
12165 * capture request
12166 */
12167 mBatchSize = 0;
12168 if (CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE == mOpMode) {
12169 fps_range.min_fps = fps_range.video_max_fps;
12170 fps_range.video_min_fps = fps_range.video_max_fps;
12171 int val = lookupHalName(HFR_MODE_MAP, METADATA_MAP_SIZE(HFR_MODE_MAP),
12172 fps_range.max_fps);
12173 if (NAME_NOT_FOUND != val) {
12174 cam_hfr_mode_t hfrMode = (cam_hfr_mode_t)val;
12175 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12176 return BAD_VALUE;
12177 }
12178
12179 if (fps_range.max_fps >= MIN_FPS_FOR_BATCH_MODE) {
12180 /* If batchmode is currently in progress and the fps changes,
12181 * set the flag to restart the sensor */
12182 if((mHFRVideoFps >= MIN_FPS_FOR_BATCH_MODE) &&
12183 (mHFRVideoFps != fps_range.max_fps)) {
12184 mNeedSensorRestart = true;
12185 }
12186 mHFRVideoFps = fps_range.max_fps;
12187 mBatchSize = mHFRVideoFps / PREVIEW_FPS_FOR_HFR;
12188 if (mBatchSize > MAX_HFR_BATCH_SIZE) {
12189 mBatchSize = MAX_HFR_BATCH_SIZE;
12190 }
12191 }
12192 LOGD("hfrMode: %d batchSize: %d", hfrMode, mBatchSize);
12193
12194 }
12195 } else {
12196 /* HFR mode is session param in backend/ISP. This should be reset when
12197 * in non-HFR mode */
12198 cam_hfr_mode_t hfrMode = CAM_HFR_MODE_OFF;
12199 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_HFR, hfrMode)) {
12200 return BAD_VALUE;
12201 }
12202 }
12203 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FPS_RANGE, fps_range)) {
12204 return BAD_VALUE;
12205 }
12206 LOGD("fps: [%f %f] vid_fps: [%f %f]", fps_range.min_fps,
12207 fps_range.max_fps, fps_range.video_min_fps, fps_range.video_max_fps);
12208 return rc;
12209}
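/* Illustrative HFR case, assuming PREVIEW_FPS_FOR_HFR == 30 (the actual value
 * comes from the HAL headers): in CONSTRAINED_HIGH_SPEED mode with an
 * aeTargetFpsRange of [120, 120], the sensor runs at a fixed 120 fps,
 * mBatchSize becomes 120 / 30 = 4 (subject to MAX_HFR_BATCH_SIZE), and the
 * preview effectively updates at ~30 fps, matching the examples in the
 * comment above. */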
12210
12211/*===========================================================================
12212 * FUNCTION : translateToHalMetadata
12213 *
12214 * DESCRIPTION: read from the camera_metadata_t and change to parm_type_t
12215 *
12216 *
12217 * PARAMETERS :
12218 * @request : request sent from framework
12219 *
12220 *
12221 * RETURN : success: NO_ERROR
12222 * failure:
12223 *==========================================================================*/
12224int QCamera3HardwareInterface::translateToHalMetadata
12225 (const camera3_capture_request_t *request,
12226 metadata_buffer_t *hal_metadata,
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012227 uint32_t snapshotStreamId) {
12228 if (request == nullptr || hal_metadata == nullptr) {
12229 return BAD_VALUE;
12230 }
12231
12232 int64_t minFrameDuration = getMinFrameDuration(request);
12233
12234 return translateFwkMetadataToHalMetadata(request->settings, hal_metadata, snapshotStreamId,
12235 minFrameDuration);
12236}
12237
12238int QCamera3HardwareInterface::translateFwkMetadataToHalMetadata(
12239 const camera_metadata_t *frameworkMetadata, metadata_buffer_t *hal_metadata,
12240 uint32_t snapshotStreamId, int64_t minFrameDuration) {
12241
Thierry Strudel3d639192016-09-09 11:52:26 -070012242 int rc = 0;
12243 CameraMetadata frame_settings;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012244 frame_settings = frameworkMetadata;
Thierry Strudel3d639192016-09-09 11:52:26 -070012245
12246 /* Do not change the order of the following list unless you know what you are
12247 * doing.
12248 * The order is laid out in such a way that parameters in the front of the table
12249 * may be used to override the parameters later in the table. Examples are:
12250 * 1. META_MODE should precede AEC/AWB/AF MODE
12251 * 2. AEC MODE should precede EXPOSURE_TIME/SENSITIVITY/FRAME_DURATION
12252 * 3. AWB_MODE should precede COLOR_CORRECTION_MODE
12253 * 4. Any mode should precede its corresponding settings
12254 */
12255 if (frame_settings.exists(ANDROID_CONTROL_MODE)) {
12256 uint8_t metaMode = frame_settings.find(ANDROID_CONTROL_MODE).data.u8[0];
12257 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MODE, metaMode)) {
12258 rc = BAD_VALUE;
12259 }
12260 rc = extractSceneMode(frame_settings, metaMode, hal_metadata);
12261 if (rc != NO_ERROR) {
12262 LOGE("extractSceneMode failed");
12263 }
12264 }
12265
12266 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12267 uint8_t fwk_aeMode =
12268 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
12269 uint8_t aeMode;
12270 int32_t redeye;
12271
12272 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_OFF ) {
12273 aeMode = CAM_AE_MODE_OFF;
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012274 } else if (fwk_aeMode == NEXUS_EXPERIMENTAL_2016_CONTROL_AE_MODE_EXTERNAL_FLASH) {
12275 aeMode = CAM_AE_MODE_ON_EXTERNAL_FLASH;
Thierry Strudel3d639192016-09-09 11:52:26 -070012276 } else {
12277 aeMode = CAM_AE_MODE_ON;
12278 }
12279 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
12280 redeye = 1;
12281 } else {
12282 redeye = 0;
12283 }
12284
12285 int val = lookupHalName(AE_FLASH_MODE_MAP, METADATA_MAP_SIZE(AE_FLASH_MODE_MAP),
12286 fwk_aeMode);
12287 if (NAME_NOT_FOUND != val) {
12288 int32_t flashMode = (int32_t)val;
12289 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode);
12290 }
12291
12292 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_MODE, aeMode);
12293 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_REDEYE_REDUCTION, redeye)) {
12294 rc = BAD_VALUE;
12295 }
12296 }
12297
12298 if (frame_settings.exists(ANDROID_CONTROL_AWB_MODE)) {
12299 uint8_t fwk_whiteLevel = frame_settings.find(ANDROID_CONTROL_AWB_MODE).data.u8[0];
12300 int val = lookupHalName(WHITE_BALANCE_MODES_MAP, METADATA_MAP_SIZE(WHITE_BALANCE_MODES_MAP),
12301 fwk_whiteLevel);
12302 if (NAME_NOT_FOUND != val) {
12303 uint8_t whiteLevel = (uint8_t)val;
12304 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_WHITE_BALANCE, whiteLevel)) {
12305 rc = BAD_VALUE;
12306 }
12307 }
12308 }
12309
12310 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE)) {
12311 uint8_t fwk_cacMode =
12312 frame_settings.find(
12313 ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0];
12314 int val = lookupHalName(COLOR_ABERRATION_MAP, METADATA_MAP_SIZE(COLOR_ABERRATION_MAP),
12315 fwk_cacMode);
12316 if (NAME_NOT_FOUND != val) {
12317 cam_aberration_mode_t cacMode = (cam_aberration_mode_t) val;
12318 bool entryAvailable = FALSE;
12319 // Check whether the framework-requested CAC mode is supported by the device
12320 for (size_t i = 0; i < gCamCapability[mCameraId]->aberration_modes_count; i++) {
12321 if (gCamCapability[mCameraId]->aberration_modes[i] == cacMode) {
12322 entryAvailable = TRUE;
12323 break;
12324 }
12325 }
12326 LOGD("FrameworksCacMode=%d entryAvailable=%d", cacMode, entryAvailable);
12327 // If the entry is not found, set a device-supported mode instead of the framework mode, i.e.,
12328 // Only HW ISP CAC + no SW CAC : advertise all 3, with HIGH doing the same as FAST in the ISP
12329 // No HW ISP CAC + only SW CAC : advertise all 3, with FAST doing the same as OFF
12330 if (entryAvailable == FALSE) {
12331 if (gCamCapability[mCameraId]->aberration_modes_count == 0) {
12332 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12333 } else {
12334 if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_HIGH_QUALITY) {
12335 // HIGH is not supported, so set FAST as the spec says the underlying
12336 // device implementation can be the same for both modes.
12337 cacMode = CAM_COLOR_CORRECTION_ABERRATION_FAST;
12338 } else if (cacMode == CAM_COLOR_CORRECTION_ABERRATION_FAST) {
12339 // Fast is not supported and so we cannot set HIGH or FAST but choose OFF
12340 // in order to avoid the fps drop due to high quality
12341 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12342 } else {
12343 cacMode = CAM_COLOR_CORRECTION_ABERRATION_OFF;
12344 }
12345 }
12346 }
12347 LOGD("Final cacMode is %d", cacMode);
12348 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_CAC, cacMode)) {
12349 rc = BAD_VALUE;
12350 }
12351 } else {
12352 LOGE("Invalid framework CAC mode: %d", fwk_cacMode);
12353 }
12354 }
12355
Jason Lee84ae9972017-02-24 13:24:24 -080012356 uint8_t fwk_focusMode = 0;
Shuzhen Wangb57ec912017-07-31 13:24:27 -070012357 if (m_bForceInfinityAf == 0) {
Thierry Strudel2896d122017-02-23 19:18:03 -080012358 if (frame_settings.exists(ANDROID_CONTROL_AF_MODE)) {
Jason Lee84ae9972017-02-24 13:24:24 -080012359 fwk_focusMode = frame_settings.find(ANDROID_CONTROL_AF_MODE).data.u8[0];
Thierry Strudel2896d122017-02-23 19:18:03 -080012360 int val = lookupHalName(FOCUS_MODES_MAP, METADATA_MAP_SIZE(FOCUS_MODES_MAP),
12361 fwk_focusMode);
12362 if (NAME_NOT_FOUND != val) {
12363 uint8_t focusMode = (uint8_t)val;
12364 LOGD("set focus mode %d", focusMode);
12365 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12366 CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12367 rc = BAD_VALUE;
12368 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012369 }
12370 }
Thierry Strudel2896d122017-02-23 19:18:03 -080012371 } else {
12372 uint8_t focusMode = (uint8_t)CAM_FOCUS_MODE_INFINITY;
12373 LOGE("Focus forced to infinity %d", focusMode);
12374 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_FOCUS_MODE, focusMode)) {
12375 rc = BAD_VALUE;
12376 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012377 }
12378
Jason Lee84ae9972017-02-24 13:24:24 -080012379 if (frame_settings.exists(ANDROID_LENS_FOCUS_DISTANCE) &&
12380 fwk_focusMode == ANDROID_CONTROL_AF_MODE_OFF) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012381 float focalDistance = frame_settings.find(ANDROID_LENS_FOCUS_DISTANCE).data.f[0];
12382 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCUS_DISTANCE,
12383 focalDistance)) {
12384 rc = BAD_VALUE;
12385 }
12386 }
12387
12388 if (frame_settings.exists(ANDROID_CONTROL_AE_ANTIBANDING_MODE)) {
12389 uint8_t fwk_antibandingMode =
12390 frame_settings.find(ANDROID_CONTROL_AE_ANTIBANDING_MODE).data.u8[0];
12391 int val = lookupHalName(ANTIBANDING_MODES_MAP,
12392 METADATA_MAP_SIZE(ANTIBANDING_MODES_MAP), fwk_antibandingMode);
12393 if (NAME_NOT_FOUND != val) {
12394 uint32_t hal_antibandingMode = (uint32_t)val;
Shuzhen Wangf6890e02016-08-12 14:28:54 -070012395 if (hal_antibandingMode == CAM_ANTIBANDING_MODE_AUTO) {
12396 if (m60HzZone) {
12397 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_60HZ;
12398 } else {
12399 hal_antibandingMode = CAM_ANTIBANDING_MODE_AUTO_50HZ;
12400 }
12401 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012402 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ANTIBANDING,
12403 hal_antibandingMode)) {
12404 rc = BAD_VALUE;
12405 }
12406 }
12407 }
12408
12409 if (frame_settings.exists(ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION)) {
12410 int32_t expCompensation = frame_settings.find(
12411 ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION).data.i32[0];
12412 if (expCompensation < gCamCapability[mCameraId]->exposure_compensation_min)
12413 expCompensation = gCamCapability[mCameraId]->exposure_compensation_min;
12414 if (expCompensation > gCamCapability[mCameraId]->exposure_compensation_max)
12415 expCompensation = gCamCapability[mCameraId]->exposure_compensation_max;
Thierry Strudel54dc9782017-02-15 12:12:10 -080012416 LOGD("Setting compensation:%d", expCompensation);
Thierry Strudel3d639192016-09-09 11:52:26 -070012417 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_COMPENSATION,
12418 expCompensation)) {
12419 rc = BAD_VALUE;
12420 }
12421 }
12422
12423 if (frame_settings.exists(ANDROID_CONTROL_AE_LOCK)) {
12424 uint8_t aeLock = frame_settings.find(ANDROID_CONTROL_AE_LOCK).data.u8[0];
12425 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_LOCK, aeLock)) {
12426 rc = BAD_VALUE;
12427 }
12428 }
12429 if (frame_settings.exists(ANDROID_CONTROL_AE_TARGET_FPS_RANGE)) {
12430 rc = setHalFpsRange(frame_settings, hal_metadata);
12431 if (rc != NO_ERROR) {
12432 LOGE("setHalFpsRange failed");
12433 }
12434 }
12435
12436 if (frame_settings.exists(ANDROID_CONTROL_AWB_LOCK)) {
12437 uint8_t awbLock = frame_settings.find(ANDROID_CONTROL_AWB_LOCK).data.u8[0];
12438 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AWB_LOCK, awbLock)) {
12439 rc = BAD_VALUE;
12440 }
12441 }
12442
12443 if (frame_settings.exists(ANDROID_CONTROL_EFFECT_MODE)) {
12444 uint8_t fwk_effectMode = frame_settings.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0];
12445 int val = lookupHalName(EFFECT_MODES_MAP, METADATA_MAP_SIZE(EFFECT_MODES_MAP),
12446 fwk_effectMode);
12447 if (NAME_NOT_FOUND != val) {
12448 uint8_t effectMode = (uint8_t)val;
12449 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EFFECT, effectMode)) {
12450 rc = BAD_VALUE;
12451 }
12452 }
12453 }
12454
12455 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_MODE)) {
12456 uint8_t colorCorrectMode = frame_settings.find(ANDROID_COLOR_CORRECTION_MODE).data.u8[0];
12457 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_MODE,
12458 colorCorrectMode)) {
12459 rc = BAD_VALUE;
12460 }
12461 }
12462
12463 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_GAINS)) {
12464 cam_color_correct_gains_t colorCorrectGains;
12465 for (size_t i = 0; i < CC_GAIN_MAX; i++) {
12466 colorCorrectGains.gains[i] =
12467 frame_settings.find(ANDROID_COLOR_CORRECTION_GAINS).data.f[i];
12468 }
12469 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_GAINS,
12470 colorCorrectGains)) {
12471 rc = BAD_VALUE;
12472 }
12473 }
12474
12475 if (frame_settings.exists(ANDROID_COLOR_CORRECTION_TRANSFORM)) {
12476 cam_color_correct_matrix_t colorCorrectTransform;
12477 cam_rational_type_t transform_elem;
12478 size_t num = 0;
12479 for (size_t i = 0; i < CC_MATRIX_ROWS; i++) {
12480 for (size_t j = 0; j < CC_MATRIX_COLS; j++) {
12481 transform_elem.numerator =
12482 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].numerator;
12483 transform_elem.denominator =
12484 frame_settings.find(ANDROID_COLOR_CORRECTION_TRANSFORM).data.r[num].denominator;
12485 colorCorrectTransform.transform_matrix[i][j] = transform_elem;
12486 num++;
12487 }
12488 }
12489 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_COLOR_CORRECT_TRANSFORM,
12490 colorCorrectTransform)) {
12491 rc = BAD_VALUE;
12492 }
12493 }
12494
12495 cam_trigger_t aecTrigger;
12496 aecTrigger.trigger = CAM_AEC_TRIGGER_IDLE;
12497 aecTrigger.trigger_id = -1;
12498 if (frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER)&&
12499 frame_settings.exists(ANDROID_CONTROL_AE_PRECAPTURE_ID)) {
12500 aecTrigger.trigger =
12501 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER).data.u8[0];
12502 aecTrigger.trigger_id =
12503 frame_settings.find(ANDROID_CONTROL_AE_PRECAPTURE_ID).data.i32[0];
12504 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_PRECAPTURE_TRIGGER,
12505 aecTrigger)) {
12506 rc = BAD_VALUE;
12507 }
12508 LOGD("precaptureTrigger: %d precaptureTriggerID: %d",
12509 aecTrigger.trigger, aecTrigger.trigger_id);
12510 }
12511
12512 /*af_trigger must come with a trigger id*/
12513 if (frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER) &&
12514 frame_settings.exists(ANDROID_CONTROL_AF_TRIGGER_ID)) {
12515 cam_trigger_t af_trigger;
12516 af_trigger.trigger =
12517 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER).data.u8[0];
12518 af_trigger.trigger_id =
12519 frame_settings.find(ANDROID_CONTROL_AF_TRIGGER_ID).data.i32[0];
12520 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_TRIGGER, af_trigger)) {
12521 rc = BAD_VALUE;
12522 }
12523 LOGD("AfTrigger: %d AfTriggerID: %d",
12524 af_trigger.trigger, af_trigger.trigger_id);
12525 }
12526
12527 if (frame_settings.exists(ANDROID_DEMOSAIC_MODE)) {
12528 int32_t demosaic = frame_settings.find(ANDROID_DEMOSAIC_MODE).data.u8[0];
12529 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_DEMOSAIC, demosaic)) {
12530 rc = BAD_VALUE;
12531 }
12532 }
12533 if (frame_settings.exists(ANDROID_EDGE_MODE)) {
12534 cam_edge_application_t edge_application;
12535 edge_application.edge_mode = frame_settings.find(ANDROID_EDGE_MODE).data.u8[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012536
Thierry Strudel3d639192016-09-09 11:52:26 -070012537 if (edge_application.edge_mode == CAM_EDGE_MODE_OFF) {
12538 edge_application.sharpness = 0;
12539 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012540 edge_application.sharpness =
12541 gCamCapability[mCameraId]->sharpness_ctrl.def_value; //default
12542 if (frame_settings.exists(QCAMERA3_SHARPNESS_STRENGTH)) {
12543 int32_t sharpness =
12544 frame_settings.find(QCAMERA3_SHARPNESS_STRENGTH).data.i32[0];
12545 if (sharpness >= gCamCapability[mCameraId]->sharpness_ctrl.min_value &&
12546 sharpness <= gCamCapability[mCameraId]->sharpness_ctrl.max_value) {
12547 LOGD("Setting edge mode sharpness %d", sharpness);
12548 edge_application.sharpness = sharpness;
12549 }
12550 }
Thierry Strudel3d639192016-09-09 11:52:26 -070012551 }
12552 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EDGE_MODE, edge_application)) {
12553 rc = BAD_VALUE;
12554 }
12555 }
12556
12557 if (frame_settings.exists(ANDROID_FLASH_MODE)) {
12558 int32_t respectFlashMode = 1;
12559 if (frame_settings.exists(ANDROID_CONTROL_AE_MODE)) {
12560 uint8_t fwk_aeMode =
12561 frame_settings.find(ANDROID_CONTROL_AE_MODE).data.u8[0];
Eino-Ville Talvalae2de8432017-02-25 11:11:13 -080012562 if (fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH ||
12563 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_ALWAYS_FLASH ||
12564 fwk_aeMode == ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH_REDEYE) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012565 respectFlashMode = 0;
12566 LOGH("AE Mode controls flash, ignore android.flash.mode");
12567 }
12568 }
12569 if (respectFlashMode) {
12570 int val = lookupHalName(FLASH_MODES_MAP, METADATA_MAP_SIZE(FLASH_MODES_MAP),
12571 (int)frame_settings.find(ANDROID_FLASH_MODE).data.u8[0]);
12572 LOGH("flash mode after mapping %d", val);
12573 // To check: CAM_INTF_META_FLASH_MODE usage
12574 if (NAME_NOT_FOUND != val) {
12575 uint8_t flashMode = (uint8_t)val;
12576 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_LED_MODE, flashMode)) {
12577 rc = BAD_VALUE;
12578 }
12579 }
12580 }
12581 }
12582
12583 if (frame_settings.exists(ANDROID_FLASH_FIRING_POWER)) {
12584 uint8_t flashPower = frame_settings.find(ANDROID_FLASH_FIRING_POWER).data.u8[0];
12585 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_POWER, flashPower)) {
12586 rc = BAD_VALUE;
12587 }
12588 }
12589
12590 if (frame_settings.exists(ANDROID_FLASH_FIRING_TIME)) {
12591 int64_t flashFiringTime = frame_settings.find(ANDROID_FLASH_FIRING_TIME).data.i64[0];
12592 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_FLASH_FIRING_TIME,
12593 flashFiringTime)) {
12594 rc = BAD_VALUE;
12595 }
12596 }
12597
12598 if (frame_settings.exists(ANDROID_HOT_PIXEL_MODE)) {
12599 uint8_t hotPixelMode = frame_settings.find(ANDROID_HOT_PIXEL_MODE).data.u8[0];
12600 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HOTPIXEL_MODE,
12601 hotPixelMode)) {
12602 rc = BAD_VALUE;
12603 }
12604 }
12605
12606 if (frame_settings.exists(ANDROID_LENS_APERTURE)) {
12607 float lensAperture = frame_settings.find( ANDROID_LENS_APERTURE).data.f[0];
12608 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_APERTURE,
12609 lensAperture)) {
12610 rc = BAD_VALUE;
12611 }
12612 }
12613
12614 if (frame_settings.exists(ANDROID_LENS_FILTER_DENSITY)) {
12615 float filterDensity = frame_settings.find(ANDROID_LENS_FILTER_DENSITY).data.f[0];
12616 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FILTERDENSITY,
12617 filterDensity)) {
12618 rc = BAD_VALUE;
12619 }
12620 }
12621
12622 if (frame_settings.exists(ANDROID_LENS_FOCAL_LENGTH)) {
12623 float focalLength = frame_settings.find(ANDROID_LENS_FOCAL_LENGTH).data.f[0];
12624 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_FOCAL_LENGTH,
12625 focalLength)) {
12626 rc = BAD_VALUE;
12627 }
12628 }
12629
12630 if (frame_settings.exists(ANDROID_LENS_OPTICAL_STABILIZATION_MODE)) {
12631 uint8_t optStabMode =
12632 frame_settings.find(ANDROID_LENS_OPTICAL_STABILIZATION_MODE).data.u8[0];
12633 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_OPT_STAB_MODE,
12634 optStabMode)) {
12635 rc = BAD_VALUE;
12636 }
12637 }
12638
12639 if (frame_settings.exists(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE)) {
12640 uint8_t videoStabMode =
12641 frame_settings.find(ANDROID_CONTROL_VIDEO_STABILIZATION_MODE).data.u8[0];
12642 LOGD("videoStabMode from APP = %d", videoStabMode);
12643 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_VIDEO_STAB_MODE,
12644 videoStabMode)) {
12645 rc = BAD_VALUE;
12646 }
12647 }
12648
12649
12650 if (frame_settings.exists(ANDROID_NOISE_REDUCTION_MODE)) {
12651 uint8_t noiseRedMode = frame_settings.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0];
12652 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_NOISE_REDUCTION_MODE,
12653 noiseRedMode)) {
12654 rc = BAD_VALUE;
12655 }
12656 }
12657
12658 if (frame_settings.exists(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR)) {
12659 float reprocessEffectiveExposureFactor =
12660 frame_settings.find(ANDROID_REPROCESS_EFFECTIVE_EXPOSURE_FACTOR).data.f[0];
12661 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_EFFECTIVE_EXPOSURE_FACTOR,
12662 reprocessEffectiveExposureFactor)) {
12663 rc = BAD_VALUE;
12664 }
12665 }
12666
12667 cam_crop_region_t scalerCropRegion;
12668 bool scalerCropSet = false;
12669 if (frame_settings.exists(ANDROID_SCALER_CROP_REGION)) {
12670 scalerCropRegion.left = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[0];
12671 scalerCropRegion.top = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[1];
12672 scalerCropRegion.width = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[2];
12673 scalerCropRegion.height = frame_settings.find(ANDROID_SCALER_CROP_REGION).data.i32[3];
12674
12675 // Map coordinate system from active array to sensor output.
12676 mCropRegionMapper.toSensor(scalerCropRegion.left, scalerCropRegion.top,
12677 scalerCropRegion.width, scalerCropRegion.height);
12678
12679 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SCALER_CROP_REGION,
12680 scalerCropRegion)) {
12681 rc = BAD_VALUE;
12682 }
12683 scalerCropSet = true;
12684 }
12685
12686 if (frame_settings.exists(ANDROID_SENSOR_EXPOSURE_TIME)) {
12687 int64_t sensorExpTime =
12688 frame_settings.find(ANDROID_SENSOR_EXPOSURE_TIME).data.i64[0];
12689 LOGD("setting sensorExpTime %lld", sensorExpTime);
12690 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_EXPOSURE_TIME,
12691 sensorExpTime)) {
12692 rc = BAD_VALUE;
12693 }
12694 }
12695
12696 if (frame_settings.exists(ANDROID_SENSOR_FRAME_DURATION)) {
12697 int64_t sensorFrameDuration =
12698 frame_settings.find(ANDROID_SENSOR_FRAME_DURATION).data.i64[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012699 sensorFrameDuration = MAX(sensorFrameDuration, minFrameDuration);
12700 if (sensorFrameDuration > gCamCapability[mCameraId]->max_frame_duration)
12701 sensorFrameDuration = gCamCapability[mCameraId]->max_frame_duration;
12702 LOGD("clamp sensorFrameDuration to %lld", sensorFrameDuration);
12703 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_FRAME_DURATION,
12704 sensorFrameDuration)) {
12705 rc = BAD_VALUE;
12706 }
12707 }
12708
12709 if (frame_settings.exists(ANDROID_SENSOR_SENSITIVITY)) {
12710 int32_t sensorSensitivity = frame_settings.find(ANDROID_SENSOR_SENSITIVITY).data.i32[0];
12711 if (sensorSensitivity < gCamCapability[mCameraId]->sensitivity_range.min_sensitivity)
12712 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.min_sensitivity;
12713 if (sensorSensitivity > gCamCapability[mCameraId]->sensitivity_range.max_sensitivity)
12714 sensorSensitivity = gCamCapability[mCameraId]->sensitivity_range.max_sensitivity;
12715 LOGD("clamp sensorSensitivity to %d", sensorSensitivity);
12716 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SENSOR_SENSITIVITY,
12717 sensorSensitivity)) {
12718 rc = BAD_VALUE;
12719 }
12720 }
12721
Thierry Strudel9e74aae2016-09-22 17:10:18 -070012722#ifndef USE_HAL_3_3
12723 if (frame_settings.exists(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST)) {
12724 int32_t ispSensitivity =
12725 frame_settings.find(ANDROID_CONTROL_POST_RAW_SENSITIVITY_BOOST).data.i32[0];
12726 if (ispSensitivity <
12727 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity) {
12728 ispSensitivity =
12729 gCamCapability[mCameraId]->isp_sensitivity_range.min_sensitivity;
12730 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12731 }
12732 if (ispSensitivity >
12733 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity) {
12734 ispSensitivity =
12735 gCamCapability[mCameraId]->isp_sensitivity_range.max_sensitivity;
12736 LOGD("clamp ispSensitivity to %d", ispSensitivity);
12737 }
12738 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_ISP_SENSITIVITY,
12739 ispSensitivity)) {
12740 rc = BAD_VALUE;
12741 }
12742 }
12743#endif
12744
Thierry Strudel3d639192016-09-09 11:52:26 -070012745 if (frame_settings.exists(ANDROID_SHADING_MODE)) {
12746 uint8_t shadingMode = frame_settings.find(ANDROID_SHADING_MODE).data.u8[0];
12747 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_SHADING_MODE, shadingMode)) {
12748 rc = BAD_VALUE;
12749 }
12750 }
12751
12752 if (frame_settings.exists(ANDROID_STATISTICS_FACE_DETECT_MODE)) {
12753 uint8_t fwk_facedetectMode =
12754 frame_settings.find(ANDROID_STATISTICS_FACE_DETECT_MODE).data.u8[0];
12755
12756 int val = lookupHalName(FACEDETECT_MODES_MAP, METADATA_MAP_SIZE(FACEDETECT_MODES_MAP),
12757 fwk_facedetectMode);
12758
12759 if (NAME_NOT_FOUND != val) {
12760 uint8_t facedetectMode = (uint8_t)val;
12761 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_FACEDETECT_MODE,
12762 facedetectMode)) {
12763 rc = BAD_VALUE;
12764 }
12765 }
12766 }
12767
Thierry Strudel54dc9782017-02-15 12:12:10 -080012768 if (frame_settings.exists(QCAMERA3_HISTOGRAM_MODE)) {
Thierry Strudel3d639192016-09-09 11:52:26 -070012769 uint8_t histogramMode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080012770 frame_settings.find(QCAMERA3_HISTOGRAM_MODE).data.u8[0];
Thierry Strudel3d639192016-09-09 11:52:26 -070012771 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
12772 histogramMode)) {
12773 rc = BAD_VALUE;
12774 }
12775 }
12776
12777 if (frame_settings.exists(ANDROID_STATISTICS_SHARPNESS_MAP_MODE)) {
12778 uint8_t sharpnessMapMode =
12779 frame_settings.find(ANDROID_STATISTICS_SHARPNESS_MAP_MODE).data.u8[0];
12780 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_SHARPNESS_MAP_MODE,
12781 sharpnessMapMode)) {
12782 rc = BAD_VALUE;
12783 }
12784 }
12785
12786 if (frame_settings.exists(ANDROID_TONEMAP_MODE)) {
12787 uint8_t tonemapMode =
12788 frame_settings.find(ANDROID_TONEMAP_MODE).data.u8[0];
12789 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_MODE, tonemapMode)) {
12790 rc = BAD_VALUE;
12791 }
12792 }
12793    /* Tonemap curve channels: ch0 = G, ch1 = B, ch2 = R */
12794    /* All tonemap channels have the same number of points */
12795 if (frame_settings.exists(ANDROID_TONEMAP_CURVE_GREEN) &&
12796 frame_settings.exists(ANDROID_TONEMAP_CURVE_BLUE) &&
12797 frame_settings.exists(ANDROID_TONEMAP_CURVE_RED)) {
12798 cam_rgb_tonemap_curves tonemapCurves;
12799 tonemapCurves.tonemap_points_cnt = frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).count/2;
12800 if (tonemapCurves.tonemap_points_cnt > CAM_MAX_TONEMAP_CURVE_SIZE) {
12801 LOGE("Fatal: tonemap_points_cnt %d exceeds max value of %d",
12802 tonemapCurves.tonemap_points_cnt,
12803 CAM_MAX_TONEMAP_CURVE_SIZE);
12804 tonemapCurves.tonemap_points_cnt = CAM_MAX_TONEMAP_CURVE_SIZE;
12805 }
12806
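        // Each framework tonemap curve is a flat array of (Pin, Pout) pairs,
        // hence tonemap_points_cnt = count/2. The points are copied per channel
        // into the HAL structure: curves[0] = G, curves[1] = B, curves[2] = R.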
12807 /* ch0 = G*/
12808 size_t point = 0;
12809 cam_tonemap_curve_t tonemapCurveGreen;
12810 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12811 for (size_t j = 0; j < 2; j++) {
12812 tonemapCurveGreen.tonemap_points[i][j] =
12813 frame_settings.find(ANDROID_TONEMAP_CURVE_GREEN).data.f[point];
12814 point++;
12815 }
12816 }
12817 tonemapCurves.curves[0] = tonemapCurveGreen;
12818
12819 /* ch 1 = B */
12820 point = 0;
12821 cam_tonemap_curve_t tonemapCurveBlue;
12822 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12823 for (size_t j = 0; j < 2; j++) {
12824 tonemapCurveBlue.tonemap_points[i][j] =
12825 frame_settings.find(ANDROID_TONEMAP_CURVE_BLUE).data.f[point];
12826 point++;
12827 }
12828 }
12829 tonemapCurves.curves[1] = tonemapCurveBlue;
12830
12831 /* ch 2 = R */
12832 point = 0;
12833 cam_tonemap_curve_t tonemapCurveRed;
12834 for (size_t i = 0; i < tonemapCurves.tonemap_points_cnt; i++) {
12835 for (size_t j = 0; j < 2; j++) {
12836 tonemapCurveRed.tonemap_points[i][j] =
12837 frame_settings.find(ANDROID_TONEMAP_CURVE_RED).data.f[point];
12838 point++;
12839 }
12840 }
12841 tonemapCurves.curves[2] = tonemapCurveRed;
12842
12843 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TONEMAP_CURVES,
12844 tonemapCurves)) {
12845 rc = BAD_VALUE;
12846 }
12847 }
12848
12849 if (frame_settings.exists(ANDROID_CONTROL_CAPTURE_INTENT)) {
12850 uint8_t captureIntent = frame_settings.find(ANDROID_CONTROL_CAPTURE_INTENT).data.u8[0];
12851 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_CAPTURE_INTENT,
12852 captureIntent)) {
12853 rc = BAD_VALUE;
12854 }
12855 }
12856
12857 if (frame_settings.exists(ANDROID_BLACK_LEVEL_LOCK)) {
12858 uint8_t blackLevelLock = frame_settings.find(ANDROID_BLACK_LEVEL_LOCK).data.u8[0];
12859 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_BLACK_LEVEL_LOCK,
12860 blackLevelLock)) {
12861 rc = BAD_VALUE;
12862 }
12863 }
12864
12865 if (frame_settings.exists(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE)) {
12866 uint8_t lensShadingMapMode =
12867 frame_settings.find(ANDROID_STATISTICS_LENS_SHADING_MAP_MODE).data.u8[0];
12868 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_LENS_SHADING_MAP_MODE,
12869 lensShadingMapMode)) {
12870 rc = BAD_VALUE;
12871 }
12872 }
12873
12874 if (frame_settings.exists(ANDROID_CONTROL_AE_REGIONS)) {
12875 cam_area_t roi;
12876 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012877 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AE_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012878
12879 // Map coordinate system from active array to sensor output.
12880 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12881 roi.rect.height);
12882
12883 if (scalerCropSet) {
12884 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12885 }
12886 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_ROI, roi)) {
12887 rc = BAD_VALUE;
12888 }
12889 }
12890
12891 if (frame_settings.exists(ANDROID_CONTROL_AF_REGIONS)) {
12892 cam_area_t roi;
12893 bool reset = true;
Chien-Yu Chen92724a82017-01-06 11:50:30 -080012894 convertFromRegions(roi, frame_settings, ANDROID_CONTROL_AF_REGIONS);
Thierry Strudel3d639192016-09-09 11:52:26 -070012895
12896 // Map coordinate system from active array to sensor output.
12897 mCropRegionMapper.toSensor(roi.rect.left, roi.rect.top, roi.rect.width,
12898 roi.rect.height);
12899
12900 if (scalerCropSet) {
12901 reset = resetIfNeededROI(&roi, &scalerCropRegion);
12902 }
12903 if (reset && ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AF_ROI, roi)) {
12904 rc = BAD_VALUE;
12905 }
12906 }
12907
12908 // CDS for non-HFR non-video mode
12909 if ((mOpMode != CAMERA3_STREAM_CONFIGURATION_CONSTRAINED_HIGH_SPEED_MODE) &&
12910 !(m_bIsVideo) && frame_settings.exists(QCAMERA3_CDS_MODE)) {
12911 int32_t *fwk_cds = frame_settings.find(QCAMERA3_CDS_MODE).data.i32;
12912 if ((CAM_CDS_MODE_MAX <= *fwk_cds) || (0 > *fwk_cds)) {
12913 LOGE("Invalid CDS mode %d!", *fwk_cds);
12914 } else {
12915 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12916 CAM_INTF_PARM_CDS_MODE, *fwk_cds)) {
12917 rc = BAD_VALUE;
12918 }
12919 }
12920 }
12921
Thierry Strudel04e026f2016-10-10 11:27:36 -070012922 // Video HDR
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012923 cam_video_hdr_mode_t vhdr = CAM_VIDEO_HDR_MODE_OFF;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012924 if (frame_settings.exists(QCAMERA3_VIDEO_HDR_MODE)) {
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012925 vhdr = (cam_video_hdr_mode_t) frame_settings.find(QCAMERA3_VIDEO_HDR_MODE).data.i32[0];
12926 }
12927 if (m_bVideoHdrEnabled)
12928 vhdr = CAM_VIDEO_HDR_MODE_ON;
12929
Thierry Strudel54dc9782017-02-15 12:12:10 -080012930 int8_t curr_hdr_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) != 0);
12931
12932 if(vhdr != curr_hdr_state)
12933 LOGH("PROFILE_SET_HDR_MODE %d" ,vhdr);
12934
Mansoor Aftab93a66e52017-01-26 14:58:25 -080012935 rc = setVideoHdrMode(mParameters, vhdr);
12936 if (rc != NO_ERROR) {
12937 LOGE("setVideoHDR is failed");
Thierry Strudel04e026f2016-10-10 11:27:36 -070012938 }
12939
12940 //IR
12941 if(frame_settings.exists(QCAMERA3_IR_MODE)) {
12942 cam_ir_mode_type_t fwk_ir = (cam_ir_mode_type_t)
12943 frame_settings.find(QCAMERA3_IR_MODE).data.i32[0];
Thierry Strudel54dc9782017-02-15 12:12:10 -080012944 uint8_t curr_ir_state = ((mCurrFeatureState & CAM_QCOM_FEATURE_IR) != 0);
12945 uint8_t isIRon = 0;
12946
12947        isIRon = (fwk_ir > 0) ? 1 : 0;
Thierry Strudel04e026f2016-10-10 11:27:36 -070012948 if ((CAM_IR_MODE_MAX <= fwk_ir) || (0 > fwk_ir)) {
12949 LOGE("Invalid IR mode %d!", fwk_ir);
12950 } else {
Thierry Strudel54dc9782017-02-15 12:12:10 -080012951 if(isIRon != curr_ir_state )
12952 LOGH("PROFILE_SET_IR_MODE %d" ,isIRon);
12953
Thierry Strudel04e026f2016-10-10 11:27:36 -070012954 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12955 CAM_INTF_META_IR_MODE, fwk_ir)) {
12956 rc = BAD_VALUE;
12957 }
12958 }
12959 }
12960
Thierry Strudel54dc9782017-02-15 12:12:10 -080012961 //Binning Correction Mode
12962 if(frame_settings.exists(QCAMERA3_BINNING_CORRECTION_MODE)) {
12963 cam_binning_correction_mode_t fwk_binning_correction = (cam_binning_correction_mode_t)
12964 frame_settings.find(QCAMERA3_BINNING_CORRECTION_MODE).data.i32[0];
12965 if ((CAM_BINNING_CORRECTION_MODE_MAX <= fwk_binning_correction)
12966 || (0 > fwk_binning_correction)) {
12967 LOGE("Invalid binning correction mode %d!", fwk_binning_correction);
12968 } else {
12969 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
12970 CAM_INTF_META_BINNING_CORRECTION_MODE, fwk_binning_correction)) {
12971 rc = BAD_VALUE;
12972 }
12973 }
12974 }
12975
Thierry Strudel269c81a2016-10-12 12:13:59 -070012976 if (frame_settings.exists(QCAMERA3_AEC_CONVERGENCE_SPEED)) {
12977 float aec_speed;
12978 aec_speed = frame_settings.find(QCAMERA3_AEC_CONVERGENCE_SPEED).data.f[0];
12979 LOGD("AEC Speed :%f", aec_speed);
12980 if ( aec_speed < 0 ) {
12981 LOGE("Invalid AEC mode %f!", aec_speed);
12982 } else {
12983 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AEC_CONVERGENCE_SPEED,
12984 aec_speed)) {
12985 rc = BAD_VALUE;
12986 }
12987 }
12988 }
12989
12990 if (frame_settings.exists(QCAMERA3_AWB_CONVERGENCE_SPEED)) {
12991 float awb_speed;
12992 awb_speed = frame_settings.find(QCAMERA3_AWB_CONVERGENCE_SPEED).data.f[0];
12993 LOGD("AWB Speed :%f", awb_speed);
12994 if ( awb_speed < 0 ) {
12995 LOGE("Invalid AWB mode %f!", awb_speed);
12996 } else {
12997 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_AWB_CONVERGENCE_SPEED,
12998 awb_speed)) {
12999 rc = BAD_VALUE;
13000 }
13001 }
13002 }
13003
Thierry Strudel3d639192016-09-09 11:52:26 -070013004 // TNR
13005 if (frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_ENABLE) &&
13006 frame_settings.exists(QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE)) {
13007 uint8_t b_TnrRequested = 0;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013008 uint8_t curr_tnr_state = ((mCurrFeatureState & CAM_QTI_FEATURE_SW_TNR) != 0);
Thierry Strudel3d639192016-09-09 11:52:26 -070013009 cam_denoise_param_t tnr;
13010 tnr.denoise_enable = frame_settings.find(QCAMERA3_TEMPORAL_DENOISE_ENABLE).data.u8[0];
13011 tnr.process_plates =
13012 (cam_denoise_process_type_t)frame_settings.find(
13013 QCAMERA3_TEMPORAL_DENOISE_PROCESS_TYPE).data.i32[0];
13014 b_TnrRequested = tnr.denoise_enable;
Thierry Strudel54dc9782017-02-15 12:12:10 -080013015
13016 if(b_TnrRequested != curr_tnr_state)
13017 LOGH("PROFILE_SET_TNR_MODE %d" ,b_TnrRequested);
13018
Thierry Strudel3d639192016-09-09 11:52:26 -070013019 if (ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_TEMPORAL_DENOISE, tnr)) {
13020 rc = BAD_VALUE;
13021 }
13022 }
13023
Thierry Strudel54dc9782017-02-15 12:12:10 -080013024 if (frame_settings.exists(QCAMERA3_EXPOSURE_METER)) {
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013025 int32_t* exposure_metering_mode =
Thierry Strudel54dc9782017-02-15 12:12:10 -080013026 frame_settings.find(QCAMERA3_EXPOSURE_METER).data.i32;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013027 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_AEC_ALGO_TYPE,
13028 *exposure_metering_mode)) {
13029 rc = BAD_VALUE;
13030 }
13031 }
13032
Thierry Strudel3d639192016-09-09 11:52:26 -070013033 if (frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_MODE)) {
13034 int32_t fwk_testPatternMode =
13035 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_MODE).data.i32[0];
13036 int testPatternMode = lookupHalName(TEST_PATTERN_MAP,
13037 METADATA_MAP_SIZE(TEST_PATTERN_MAP), fwk_testPatternMode);
13038
13039 if (NAME_NOT_FOUND != testPatternMode) {
13040 cam_test_pattern_data_t testPatternData;
13041 memset(&testPatternData, 0, sizeof(testPatternData));
13042 testPatternData.mode = (cam_test_pattern_mode_t)testPatternMode;
13043 if (testPatternMode == CAM_TEST_PATTERN_SOLID_COLOR &&
13044 frame_settings.exists(ANDROID_SENSOR_TEST_PATTERN_DATA)) {
13045 int32_t *fwk_testPatternData =
13046 frame_settings.find(ANDROID_SENSOR_TEST_PATTERN_DATA).data.i32;
13047 testPatternData.r = fwk_testPatternData[0];
13048 testPatternData.b = fwk_testPatternData[3];
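                    // data[0] carries R and data[3] carries B; the two green
                    // samples (data[1], data[2]) map to Gr/Gb depending on the
                    // sensor's color filter arrangement, handled below.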
13049 switch (gCamCapability[mCameraId]->color_arrangement) {
13050 case CAM_FILTER_ARRANGEMENT_RGGB:
13051 case CAM_FILTER_ARRANGEMENT_GRBG:
13052 testPatternData.gr = fwk_testPatternData[1];
13053 testPatternData.gb = fwk_testPatternData[2];
13054 break;
13055 case CAM_FILTER_ARRANGEMENT_GBRG:
13056 case CAM_FILTER_ARRANGEMENT_BGGR:
13057 testPatternData.gr = fwk_testPatternData[2];
13058 testPatternData.gb = fwk_testPatternData[1];
13059 break;
13060 default:
13061 LOGE("color arrangement %d is not supported",
13062 gCamCapability[mCameraId]->color_arrangement);
13063 break;
13064 }
13065 }
13066 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TEST_PATTERN_DATA,
13067 testPatternData)) {
13068 rc = BAD_VALUE;
13069 }
13070 } else {
13071 LOGE("Invalid framework sensor test pattern mode %d",
13072 fwk_testPatternMode);
13073 }
13074 }
13075
13076 if (frame_settings.exists(ANDROID_JPEG_GPS_COORDINATES)) {
13077 size_t count = 0;
13078 camera_metadata_entry_t gps_coords = frame_settings.find(ANDROID_JPEG_GPS_COORDINATES);
13079 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_COORDINATES,
13080 gps_coords.data.d, gps_coords.count, count);
13081 if (gps_coords.count != count) {
13082 rc = BAD_VALUE;
13083 }
13084 }
13085
13086 if (frame_settings.exists(ANDROID_JPEG_GPS_PROCESSING_METHOD)) {
13087 char gps_methods[GPS_PROCESSING_METHOD_SIZE];
13088 size_t count = 0;
13089 const char *gps_methods_src = (const char *)
13090 frame_settings.find(ANDROID_JPEG_GPS_PROCESSING_METHOD).data.u8;
13091 memset(gps_methods, '\0', sizeof(gps_methods));
13092 strlcpy(gps_methods, gps_methods_src, sizeof(gps_methods));
13093 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_PROC_METHODS,
13094 gps_methods, GPS_PROCESSING_METHOD_SIZE, count);
13095 if (GPS_PROCESSING_METHOD_SIZE != count) {
13096 rc = BAD_VALUE;
13097 }
13098 }
13099
13100 if (frame_settings.exists(ANDROID_JPEG_GPS_TIMESTAMP)) {
13101 int64_t gps_timestamp = frame_settings.find(ANDROID_JPEG_GPS_TIMESTAMP).data.i64[0];
13102 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_GPS_TIMESTAMP,
13103 gps_timestamp)) {
13104 rc = BAD_VALUE;
13105 }
13106 }
13107
13108 if (frame_settings.exists(ANDROID_JPEG_ORIENTATION)) {
13109 int32_t orientation = frame_settings.find(ANDROID_JPEG_ORIENTATION).data.i32[0];
13110 cam_rotation_info_t rotation_info;
13111 if (orientation == 0) {
13112 rotation_info.rotation = ROTATE_0;
13113 } else if (orientation == 90) {
13114 rotation_info.rotation = ROTATE_90;
13115 } else if (orientation == 180) {
13116 rotation_info.rotation = ROTATE_180;
13117 } else if (orientation == 270) {
13118 rotation_info.rotation = ROTATE_270;
13119 }
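        // ANDROID_JPEG_ORIENTATION is expected in multiples of 90 degrees, so
        // one of the branches above should always have matched.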
Shuzhen Wang6ec8eac2016-07-28 23:09:23 -070013120 rotation_info.device_rotation = ROTATE_0;
Thierry Strudel3d639192016-09-09 11:52:26 -070013121 rotation_info.streamId = snapshotStreamId;
13122 ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_ORIENTATION, orientation);
13123 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ROTATION, rotation_info)) {
13124 rc = BAD_VALUE;
13125 }
13126 }
13127
13128 if (frame_settings.exists(ANDROID_JPEG_QUALITY)) {
13129 uint32_t quality = (uint32_t) frame_settings.find(ANDROID_JPEG_QUALITY).data.u8[0];
13130 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_QUALITY, quality)) {
13131 rc = BAD_VALUE;
13132 }
13133 }
13134
13135 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_QUALITY)) {
13136 uint32_t thumb_quality = (uint32_t)
13137 frame_settings.find(ANDROID_JPEG_THUMBNAIL_QUALITY).data.u8[0];
13138 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_QUALITY,
13139 thumb_quality)) {
13140 rc = BAD_VALUE;
13141 }
13142 }
13143
13144 if (frame_settings.exists(ANDROID_JPEG_THUMBNAIL_SIZE)) {
13145 cam_dimension_t dim;
13146 dim.width = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[0];
13147 dim.height = frame_settings.find(ANDROID_JPEG_THUMBNAIL_SIZE).data.i32[1];
13148 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_JPEG_THUMB_SIZE, dim)) {
13149 rc = BAD_VALUE;
13150 }
13151 }
13152
13153 // Internal metadata
13154 if (frame_settings.exists(QCAMERA3_PRIVATEDATA_REPROCESS)) {
13155 size_t count = 0;
13156 camera_metadata_entry_t privatedata = frame_settings.find(QCAMERA3_PRIVATEDATA_REPROCESS);
13157 ADD_SET_PARAM_ARRAY_TO_BATCH(hal_metadata, CAM_INTF_META_PRIVATE_DATA,
13158 privatedata.data.i32, privatedata.count, count);
13159 if (privatedata.count != count) {
13160 rc = BAD_VALUE;
13161 }
13162 }
13163
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013164 // ISO/Exposure Priority
13165 if (frame_settings.exists(QCAMERA3_USE_ISO_EXP_PRIORITY) &&
13166 frame_settings.exists(QCAMERA3_SELECT_PRIORITY)) {
13167 cam_priority_mode_t mode =
13168 (cam_priority_mode_t)frame_settings.find(QCAMERA3_SELECT_PRIORITY).data.i32[0];
13169 if((CAM_ISO_PRIORITY == mode) || (CAM_EXP_PRIORITY == mode)) {
13170 cam_intf_parm_manual_3a_t use_iso_exp_pty;
13171 use_iso_exp_pty.previewOnly = FALSE;
13172 uint64_t* ptr = (uint64_t*)frame_settings.find(QCAMERA3_USE_ISO_EXP_PRIORITY).data.i64;
13173 use_iso_exp_pty.value = *ptr;
13174
13175 if(CAM_ISO_PRIORITY == mode) {
13176 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ISO,
13177 use_iso_exp_pty)) {
13178 rc = BAD_VALUE;
13179 }
13180 }
13181 else {
13182 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EXPOSURE_TIME,
13183 use_iso_exp_pty)) {
13184 rc = BAD_VALUE;
13185 }
13186 }
Thierry Strudel54dc9782017-02-15 12:12:10 -080013187
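            // Manual ISO/exposure priority relies on ZSL being enabled; ZSL is
            // disabled again below when the priority settings are absent.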
13188 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 1)) {
13189 rc = BAD_VALUE;
13190 }
13191 }
13192 } else {
13193 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_ZSL_MODE, 0)) {
13194 rc = BAD_VALUE;
Thierry Strudel295a0ca2016-11-03 18:38:47 -070013195 }
13196 }
13197
13198 // Saturation
13199 if (frame_settings.exists(QCAMERA3_USE_SATURATION)) {
13200 int32_t* use_saturation =
13201 frame_settings.find(QCAMERA3_USE_SATURATION).data.i32;
13202 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_SATURATION, *use_saturation)) {
13203 rc = BAD_VALUE;
13204 }
13205 }
13206
Thierry Strudel3d639192016-09-09 11:52:26 -070013207 // EV step
13208 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_PARM_EV_STEP,
13209 gCamCapability[mCameraId]->exp_compensation_step)) {
13210 rc = BAD_VALUE;
13211 }
13212
13213 // CDS info
13214 if (frame_settings.exists(QCAMERA3_CDS_INFO)) {
13215 cam_cds_data_t *cdsData = (cam_cds_data_t *)
13216 frame_settings.find(QCAMERA3_CDS_INFO).data.u8;
13217
13218 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13219 CAM_INTF_META_CDS_DATA, *cdsData)) {
13220 rc = BAD_VALUE;
13221 }
13222 }
13223
Shuzhen Wang19463d72016-03-08 11:09:52 -080013224 // Hybrid AE
13225 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE)) {
13226 uint8_t *hybrid_ae = (uint8_t *)
13227 frame_settings.find(NEXUS_EXPERIMENTAL_2016_HYBRID_AE_ENABLE).data.u8;
Shuzhen Wang77b049a2017-08-30 12:24:36 -070013228 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_HYBRID_AE, *hybrid_ae)) {
13229 rc = BAD_VALUE;
13230 }
Shuzhen Wang19463d72016-03-08 11:09:52 -080013231 }
13232
Shuzhen Wang14415f52016-11-16 18:26:18 -080013233 // Histogram
13234 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE)) {
13235 uint8_t histogramMode =
13236 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_ENABLE).data.u8[0];
13237 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_MODE,
13238 histogramMode)) {
13239 rc = BAD_VALUE;
13240 }
13241 }
13242
13243 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS)) {
13244 int32_t histogramBins =
13245 frame_settings.find(NEXUS_EXPERIMENTAL_2017_HISTOGRAM_BINS).data.i32[0];
13246 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_STATS_HISTOGRAM_BINS,
13247 histogramBins)) {
13248 rc = BAD_VALUE;
13249 }
13250 }
13251
Shuzhen Wangcc386c52017-03-29 09:28:08 -070013252 // Tracking AF
13253 if (frame_settings.exists(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER)) {
13254 uint8_t trackingAfTrigger =
13255 frame_settings.find(NEXUS_EXPERIMENTAL_2017_TRACKING_AF_TRIGGER).data.u8[0];
13256 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_TRACKING_AF_TRIGGER,
13257 trackingAfTrigger)) {
13258 rc = BAD_VALUE;
13259 }
13260 }
13261
Chien-Yu Chendbd619b2017-08-04 17:50:11 -070013262 // Makernote
13263 camera_metadata_entry entry = frame_settings.find(NEXUS_EXPERIMENTAL_2017_EXIF_MAKERNOTE);
13264 if (entry.count != 0) {
13265 if (entry.count <= MAX_MAKERNOTE_LENGTH) {
13266 cam_makernote_t makernote;
13267 makernote.length = entry.count;
13268 memcpy(makernote.data, entry.data.u8, makernote.length);
13269 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_META_MAKERNOTE, makernote)) {
13270 rc = BAD_VALUE;
13271 }
13272 } else {
13273 ALOGE("%s: Makernote length %u is larger than %d", __FUNCTION__, entry.count,
13274 MAX_MAKERNOTE_LENGTH);
13275 rc = BAD_VALUE;
13276 }
13277 }
13278
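    /*
     * Note on the translation pattern above: most framework keys are handled
     * the same way -- test frame_settings.exists(), read the typed value,
     * clamp or remap it as needed, then push it into the HAL batch with
     * ADD_SET_PARAM_ENTRY_TO_BATCH(), flagging BAD_VALUE on failure. A minimal
     * sketch of that pattern (ANDROID_SOME_KEY and CAM_INTF_SOME_PARAM are
     * purely illustrative placeholders, not real tags):
     *
     *   if (frame_settings.exists(ANDROID_SOME_KEY)) {
     *       uint8_t value = frame_settings.find(ANDROID_SOME_KEY).data.u8[0];
     *       if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata, CAM_INTF_SOME_PARAM,
     *               value)) {
     *           rc = BAD_VALUE;
     *       }
     *   }
     */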
Thierry Strudel3d639192016-09-09 11:52:26 -070013279 return rc;
13280}
13281
13282/*===========================================================================
13283 * FUNCTION : captureResultCb
13284 *
13285 * DESCRIPTION: Callback handler for all channels (streams, as well as metadata)
13286 *
13287 * PARAMETERS :
13288 * @frame : frame information from mm-camera-interface
13289 * @buffer : actual gralloc buffer to be returned to frameworks. NULL if metadata.
13290 * @userdata: userdata
13291 *
13292 * RETURN : NONE
13293 *==========================================================================*/
13294void QCamera3HardwareInterface::captureResultCb(mm_camera_super_buf_t *metadata,
13295 camera3_stream_buffer_t *buffer,
13296 uint32_t frame_number, bool isInputBuffer, void *userdata)
13297{
13298 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13299 if (hw == NULL) {
13300 LOGE("Invalid hw %p", hw);
13301 return;
13302 }
13303
13304 hw->captureResultCb(metadata, buffer, frame_number, isInputBuffer);
13305 return;
13306}
13307
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013308/*===========================================================================
13309 * FUNCTION : setBufferErrorStatus
13310 *
13311 * DESCRIPTION: Callback handler for channels to report any buffer errors
13312 *
13313 * PARAMETERS :
13314 * @ch : Channel on which buffer error is reported from
13315 * @frame_number : frame number on which buffer error is reported on
13316 * @buffer_status : buffer error status
13317 * @userdata: userdata
13318 *
13319 * RETURN : NONE
13320 *==========================================================================*/
13321void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13322 uint32_t frame_number, camera3_buffer_status_t err, void *userdata)
13323{
13324 QCamera3HardwareInterface *hw = (QCamera3HardwareInterface *)userdata;
13325 if (hw == NULL) {
13326 LOGE("Invalid hw %p", hw);
13327 return;
13328 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013329
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013330 hw->setBufferErrorStatus(ch, frame_number, err);
13331 return;
13332}
13333
13334void QCamera3HardwareInterface::setBufferErrorStatus(QCamera3Channel* ch,
13335 uint32_t frameNumber, camera3_buffer_status_t err)
13336{
13337 LOGD("channel: %p, frame# %d, buf err: %d", ch, frameNumber, err);
13338 pthread_mutex_lock(&mMutex);
13339
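    // Mark every pending buffer of this channel in the given frame as errored
    // so it is returned to the framework with CAMERA3_BUFFER_STATUS_ERROR.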
13340 for (auto& req : mPendingBuffersMap.mPendingBuffersInRequest) {
13341 if (req.frame_number != frameNumber)
13342 continue;
13343 for (auto& k : req.mPendingBufferList) {
13344 if(k.stream->priv == ch) {
13345 k.bufStatus = CAMERA3_BUFFER_STATUS_ERROR;
13346 }
13347 }
13348 }
13349
13350 pthread_mutex_unlock(&mMutex);
13351 return;
13352}
Thierry Strudel3d639192016-09-09 11:52:26 -070013353/*===========================================================================
13354 * FUNCTION : initialize
13355 *
13356 * DESCRIPTION: Pass framework callback pointers to HAL
13357 *
13358 * @device : camera3 device structure
13359 * @callback_ops : framework callback function pointers
13360 *
13361 * RETURN : Success : 0
13362 * Failure: -ENODEV
13363 *==========================================================================*/
13364
13365int QCamera3HardwareInterface::initialize(const struct camera3_device *device,
13366 const camera3_callback_ops_t *callback_ops)
13367{
13368 LOGD("E");
13369 QCamera3HardwareInterface *hw =
13370 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13371 if (!hw) {
13372 LOGE("NULL camera device");
13373 return -ENODEV;
13374 }
13375
13376 int rc = hw->initialize(callback_ops);
13377 LOGD("X");
13378 return rc;
13379}
13380
13381/*===========================================================================
13382 * FUNCTION : configure_streams
13383 *
13384 * DESCRIPTION: Configure the set of output streams requested by the framework
13385 *
13386 * PARAMETERS :
13387 * @device : camera3 device structure
13388 * @stream_list : stream configuration requested by the framework
13389 * RETURN : Success: 0
13390 * Failure: -EINVAL (if stream configuration is invalid)
13391 * -ENODEV (fatal error)
13392 *==========================================================================*/
13393
13394int QCamera3HardwareInterface::configure_streams(
13395 const struct camera3_device *device,
13396 camera3_stream_configuration_t *stream_list)
13397{
13398 LOGD("E");
13399 QCamera3HardwareInterface *hw =
13400 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13401 if (!hw) {
13402 LOGE("NULL camera device");
13403 return -ENODEV;
13404 }
13405 int rc = hw->configureStreams(stream_list);
13406 LOGD("X");
13407 return rc;
13408}
13409
13410/*===========================================================================
13411 * FUNCTION : construct_default_request_settings
13412 *
13413 * DESCRIPTION: Configure a settings buffer to meet the required use case
13414 *
13415 * PARAMETERS :
13416 * @device : camera3 device structure
13417 * @type : capture template type (CAMERA3_TEMPLATE_*)
13418 * RETURN : Success: Return valid metadata
13419 * Failure: Return NULL
13420 *==========================================================================*/
13421const camera_metadata_t* QCamera3HardwareInterface::
13422 construct_default_request_settings(const struct camera3_device *device,
13423 int type)
13424{
13425
13426 LOGD("E");
13427 camera_metadata_t* fwk_metadata = NULL;
13428 QCamera3HardwareInterface *hw =
13429 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13430 if (!hw) {
13431 LOGE("NULL camera device");
13432 return NULL;
13433 }
13434
13435 fwk_metadata = hw->translateCapabilityToMetadata(type);
13436
13437 LOGD("X");
13438 return fwk_metadata;
13439}
13440
13441/*===========================================================================
13442 * FUNCTION : process_capture_request
13443 *
13444 * DESCRIPTION: Submit a new capture request from the framework for processing
13445 *
13446 * PARAMETERS :
13447 * @device : camera3 device structure
13448 * @request : capture request to be processed
13449 * RETURN : 0 on success, -EINVAL for an invalid device or request
13450 *==========================================================================*/
13451int QCamera3HardwareInterface::process_capture_request(
13452 const struct camera3_device *device,
13453 camera3_capture_request_t *request)
13454{
13455 LOGD("E");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013456 CAMSCOPE_UPDATE_FLAGS(CAMSCOPE_SECTION_HAL, kpi_camscope_flags);
Thierry Strudel3d639192016-09-09 11:52:26 -070013457 QCamera3HardwareInterface *hw =
13458 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13459 if (!hw) {
13460 LOGE("NULL camera device");
13461 return -EINVAL;
13462 }
13463
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013464 int rc = hw->orchestrateRequest(request);
Thierry Strudel3d639192016-09-09 11:52:26 -070013465 LOGD("X");
13466 return rc;
13467}
13468
13469/*===========================================================================
13470 * FUNCTION : dump
13471 *
13472 * DESCRIPTION: Dump HAL debug state to the given file descriptor
13473 *
13474 * PARAMETERS :
13475 * @device : camera3 device structure
13476 * @fd : file descriptor to dump into
13477 * RETURN : None
13478 *==========================================================================*/
13479
13480void QCamera3HardwareInterface::dump(
13481 const struct camera3_device *device, int fd)
13482{
13483 /* Log level property is read when "adb shell dumpsys media.camera" is
13484 called so that the log level can be controlled without restarting
13485 the media server */
13486 getLogLevel();
13487
13488 LOGD("E");
13489 QCamera3HardwareInterface *hw =
13490 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13491 if (!hw) {
13492 LOGE("NULL camera device");
13493 return;
13494 }
13495
13496 hw->dump(fd);
13497 LOGD("X");
13498 return;
13499}
13500
13501/*===========================================================================
13502 * FUNCTION : flush
13503 *
13504 * DESCRIPTION: Flush in-flight captures and return pending buffers to the framework
13505 *
13506 * PARAMETERS :
13507 * @device : camera3 device structure
13508 *
13509 * RETURN : 0 on success, -EINVAL on invalid device, -ENODEV on device error
13510 *==========================================================================*/
13511
13512int QCamera3HardwareInterface::flush(
13513 const struct camera3_device *device)
13514{
13515 int rc;
13516 LOGD("E");
13517 QCamera3HardwareInterface *hw =
13518 reinterpret_cast<QCamera3HardwareInterface *>(device->priv);
13519 if (!hw) {
13520 LOGE("NULL camera device");
13521 return -EINVAL;
13522 }
13523
13524 pthread_mutex_lock(&hw->mMutex);
13525 // Validate current state
13526 switch (hw->mState) {
13527 case STARTED:
13528 /* valid state */
13529 break;
13530
13531 case ERROR:
13532 pthread_mutex_unlock(&hw->mMutex);
13533 hw->handleCameraDeviceError();
13534 return -ENODEV;
13535
13536 default:
13537 LOGI("Flush returned during state %d", hw->mState);
13538 pthread_mutex_unlock(&hw->mMutex);
13539 return 0;
13540 }
13541 pthread_mutex_unlock(&hw->mMutex);
13542
13543 rc = hw->flush(true /* restart channels */ );
13544 LOGD("X");
13545 return rc;
13546}
13547
13548/*===========================================================================
13549 * FUNCTION : close_camera_device
13550 *
13551 * DESCRIPTION: Close the camera device and release the HAL instance
13552 *
13553 * PARAMETERS :
13554 * @device : hw device handle of the camera to close
13555 *
13556 * RETURN : NO_ERROR on success, BAD_VALUE on NULL device
13557 *==========================================================================*/
13558int QCamera3HardwareInterface::close_camera_device(struct hw_device_t* device)
13559{
13560 int ret = NO_ERROR;
13561 QCamera3HardwareInterface *hw =
13562 reinterpret_cast<QCamera3HardwareInterface *>(
13563 reinterpret_cast<camera3_device_t *>(device)->priv);
13564 if (!hw) {
13565 LOGE("NULL camera device");
13566 return BAD_VALUE;
13567 }
13568
13569 LOGI("[KPI Perf]: E camera id %d", hw->mCameraId);
13570 delete hw;
13571 LOGI("[KPI Perf]: X");
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013572 CAMSCOPE_DESTROY(CAMSCOPE_SECTION_HAL);
Thierry Strudel3d639192016-09-09 11:52:26 -070013573 return ret;
13574}
13575
13576/*===========================================================================
13577 * FUNCTION : getWaveletDenoiseProcessPlate
13578 *
13579 * DESCRIPTION: query wavelet denoise process plate
13580 *
13581 * PARAMETERS : None
13582 *
13583 * RETURN : WNR process plate value
13584 *==========================================================================*/
13585cam_denoise_process_type_t QCamera3HardwareInterface::getWaveletDenoiseProcessPlate()
13586{
13587 char prop[PROPERTY_VALUE_MAX];
13588 memset(prop, 0, sizeof(prop));
13589 property_get("persist.denoise.process.plates", prop, "0");
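    // 0: YCbCr plane, 1: CbCr only, 2: streamlined YCbCr, 3: streamlined CbCr;
    // any other value falls back to streamlined YCbCr (see the switch below).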
13590 int processPlate = atoi(prop);
13591 switch(processPlate) {
13592 case 0:
13593 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13594 case 1:
13595 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13596 case 2:
13597 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13598 case 3:
13599 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13600 default:
13601 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13602 }
13603}
13604
13605
13606/*===========================================================================
13607 * FUNCTION : getTemporalDenoiseProcessPlate
13608 *
13609 * DESCRIPTION: query temporal denoise process plate
13610 *
13611 * PARAMETERS : None
13612 *
13613 * RETURN : TNR process plate value
13614 *==========================================================================*/
13615cam_denoise_process_type_t QCamera3HardwareInterface::getTemporalDenoiseProcessPlate()
13616{
13617 char prop[PROPERTY_VALUE_MAX];
13618 memset(prop, 0, sizeof(prop));
13619 property_get("persist.tnr.process.plates", prop, "0");
13620 int processPlate = atoi(prop);
13621 switch(processPlate) {
13622 case 0:
13623 return CAM_WAVELET_DENOISE_YCBCR_PLANE;
13624 case 1:
13625 return CAM_WAVELET_DENOISE_CBCR_ONLY;
13626 case 2:
13627 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13628 case 3:
13629 return CAM_WAVELET_DENOISE_STREAMLINED_CBCR;
13630 default:
13631 return CAM_WAVELET_DENOISE_STREAMLINE_YCBCR;
13632 }
13633}
13634
13635
13636/*===========================================================================
13637 * FUNCTION : extractSceneMode
13638 *
13639 * DESCRIPTION: Extract scene mode from frameworks set metadata
13640 *
13641 * PARAMETERS :
13642 * @frame_settings: CameraMetadata reference
13643 * @metaMode: ANDROID_CONTROL_MODE
13644 * @hal_metadata: hal metadata structure
13645 *
13646 * RETURN : int32_t type of status (NO_ERROR on success)
13647 *==========================================================================*/
13648int32_t QCamera3HardwareInterface::extractSceneMode(
13649 const CameraMetadata &frame_settings, uint8_t metaMode,
13650 metadata_buffer_t *hal_metadata)
13651{
13652 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013653 uint8_t sceneMode = CAM_SCENE_MODE_OFF;
13654
13655 if (ANDROID_CONTROL_MODE_OFF_KEEP_STATE == metaMode) {
13656 LOGD("Ignoring control mode OFF_KEEP_STATE");
13657 return NO_ERROR;
13658 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013659
13660 if (metaMode == ANDROID_CONTROL_MODE_USE_SCENE_MODE) {
13661 camera_metadata_ro_entry entry =
13662 frame_settings.find(ANDROID_CONTROL_SCENE_MODE);
13663 if (0 == entry.count)
13664 return rc;
13665
13666 uint8_t fwk_sceneMode = entry.data.u8[0];
13667
13668 int val = lookupHalName(SCENE_MODES_MAP,
13669 sizeof(SCENE_MODES_MAP)/sizeof(SCENE_MODES_MAP[0]),
13670 fwk_sceneMode);
13671 if (NAME_NOT_FOUND != val) {
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013672 sceneMode = (uint8_t)val;
Thierry Strudel3d639192016-09-09 11:52:26 -070013673 LOGD("sceneMode: %d", sceneMode);
Thierry Strudel3d639192016-09-09 11:52:26 -070013674 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013675 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013676
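    // Sensor HDR has to be updated both when the HDR scene mode is requested
    // and when it was previously enabled and now needs to be turned off.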
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013677 if ((sceneMode == CAM_SCENE_MODE_HDR) || m_bSensorHDREnabled) {
13678 rc = setSensorHDR(hal_metadata, (sceneMode == CAM_SCENE_MODE_HDR));
13679 }
13680
13681 if ((rc == NO_ERROR) && !m_bSensorHDREnabled) {
13682 if (sceneMode == ANDROID_CONTROL_SCENE_MODE_HDR) {
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013683 cam_hdr_param_t hdr_params;
13684 hdr_params.hdr_enable = 1;
13685 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13686 hdr_params.hdr_need_1x = false;
13687 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13688 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13689 rc = BAD_VALUE;
13690 }
13691 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013692
Thierry Strudel3d639192016-09-09 11:52:26 -070013693 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13694 CAM_INTF_PARM_BESTSHOT_MODE, sceneMode)) {
13695 rc = BAD_VALUE;
13696 }
13697 }
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013698
13699 if (mForceHdrSnapshot) {
13700 cam_hdr_param_t hdr_params;
13701 hdr_params.hdr_enable = 1;
13702 hdr_params.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13703 hdr_params.hdr_need_1x = false;
13704 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13705 CAM_INTF_PARM_HAL_BRACKETING_HDR, hdr_params)) {
13706 rc = BAD_VALUE;
13707 }
13708 }
13709
Thierry Strudel3d639192016-09-09 11:52:26 -070013710 return rc;
13711}
13712
13713/*===========================================================================
Thierry Strudel04e026f2016-10-10 11:27:36 -070013714 * FUNCTION : setVideoHdrMode
13715 *
13716 * DESCRIPTION: Set Video HDR mode from frameworks set metadata
13717 *
13718 * PARAMETERS :
13719 * @hal_metadata: hal metadata structure
13720 * @vhdr : video HDR mode requested via QCAMERA3_VIDEO_HDR_MODE
13721 *
13722 * RETURN : int32_t type of status (NO_ERROR on success, BAD_VALUE on invalid mode)
13723 *==========================================================================*/
13724int32_t QCamera3HardwareInterface::setVideoHdrMode(
13725 metadata_buffer_t *hal_metadata, cam_video_hdr_mode_t vhdr)
13726{
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013727 if ( (vhdr >= CAM_VIDEO_HDR_MODE_OFF) && (vhdr < CAM_VIDEO_HDR_MODE_MAX)) {
13728 return setSensorHDR(hal_metadata, (vhdr == CAM_VIDEO_HDR_MODE_ON), true);
13729 }
13730
13731 LOGE("Invalid Video HDR mode %d!", vhdr);
13732 return BAD_VALUE;
13733}
13734
13735/*===========================================================================
13736 * FUNCTION : setSensorHDR
13737 *
13738 * DESCRIPTION: Enable/disable sensor HDR.
13739 *
13740 * PARAMETERS :
13741 * @hal_metadata: hal metadata structure
13742 * @enable: boolean whether to enable/disable sensor HDR
13743 *
13744 * RETURN : int32_t type of status (NO_ERROR on success)
13745 *==========================================================================*/
13746int32_t QCamera3HardwareInterface::setSensorHDR(
13747 metadata_buffer_t *hal_metadata, bool enable, bool isVideoHdrEnable)
13748{
Thierry Strudel04e026f2016-10-10 11:27:36 -070013749 int32_t rc = NO_ERROR;
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013750 cam_sensor_hdr_type_t sensor_hdr = CAM_SENSOR_HDR_OFF;
13751
13752 if (enable) {
13753 char sensor_hdr_prop[PROPERTY_VALUE_MAX];
13754 memset(sensor_hdr_prop, 0, sizeof(sensor_hdr_prop));
13755 #ifdef _LE_CAMERA_
13756 //Default to staggered HDR for IOT
13757 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "3");
13758 #else
13759 property_get("persist.camera.sensor.hdr", sensor_hdr_prop, "0");
13760 #endif
13761 sensor_hdr = (cam_sensor_hdr_type_t) atoi(sensor_hdr_prop);
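        // The property value is interpreted directly as a cam_sensor_hdr_type_t;
        // leaving it at 0 (CAM_SENSOR_HDR_OFF) keeps sensor HDR disabled even
        // when enable is true.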
13762 }
13763
13764 bool isSupported = false;
13765 switch (sensor_hdr) {
13766 case CAM_SENSOR_HDR_IN_SENSOR:
13767 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13768 CAM_QCOM_FEATURE_SENSOR_HDR) {
13769 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013770 LOGD("Setting HDR mode In Sensor");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013771 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013772 break;
13773 case CAM_SENSOR_HDR_ZIGZAG:
13774 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13775 CAM_QCOM_FEATURE_ZIGZAG_HDR) {
13776 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013777 LOGD("Setting HDR mode Zigzag");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013778 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013779 break;
13780 case CAM_SENSOR_HDR_STAGGERED:
13781 if (gCamCapability[mCameraId]->qcom_supported_feature_mask &
13782 CAM_QCOM_FEATURE_STAGGERED_VIDEO_HDR) {
13783 isSupported = true;
Thierry Strudel04e026f2016-10-10 11:27:36 -070013784 LOGD("Setting HDR mode Staggered");
Thierry Strudel04e026f2016-10-10 11:27:36 -070013785 }
Mansoor Aftab58465fa2017-01-26 15:02:44 -080013786 break;
13787 case CAM_SENSOR_HDR_OFF:
13788 isSupported = true;
13789 LOGD("Turning off sensor HDR");
13790 break;
13791 default:
13792 LOGE("HDR mode %d not supported", sensor_hdr);
13793 rc = BAD_VALUE;
13794 break;
13795 }
13796
13797 if(isSupported) {
13798 if (ADD_SET_PARAM_ENTRY_TO_BATCH(hal_metadata,
13799 CAM_INTF_PARM_SENSOR_HDR, sensor_hdr)) {
13800 rc = BAD_VALUE;
13801 } else {
13802 if(!isVideoHdrEnable)
13803 m_bSensorHDREnabled = (sensor_hdr != CAM_SENSOR_HDR_OFF);
Thierry Strudel04e026f2016-10-10 11:27:36 -070013804 }
13805 }
13806 return rc;
13807}
13808
13809/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070013810 * FUNCTION : needRotationReprocess
13811 *
13812 * DESCRIPTION: if rotation needs to be done by reprocess in pp
13813 *
13814 * PARAMETERS : none
13815 *
13816 * RETURN : true: needed
13817 * false: no need
13818 *==========================================================================*/
13819bool QCamera3HardwareInterface::needRotationReprocess()
13820{
13821 if ((gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION) > 0) {
13822        // pp has the capability to process rotation, so reprocess is needed for rotation
13823 LOGH("need do reprocess for rotation");
13824 return true;
13825 }
13826
13827 return false;
13828}
13829
13830/*===========================================================================
13831 * FUNCTION : needReprocess
13832 *
13833 * DESCRIPTION: if reprocess is needed
13834 *
13835 * PARAMETERS : none
13836 *
13837 * RETURN : true: needed
13838 * false: no need
13839 *==========================================================================*/
13840bool QCamera3HardwareInterface::needReprocess(cam_feature_mask_t postprocess_mask)
13841{
13842 if (gCamCapability[mCameraId]->qcom_supported_feature_mask > 0) {
13843 // TODO: add for ZSL HDR later
13844 // pp module has min requirement for zsl reprocess, or WNR in ZSL mode
13845 if(postprocess_mask == CAM_QCOM_FEATURE_NONE){
13846 LOGH("need do reprocess for ZSL WNR or min PP reprocess");
13847 return true;
13848 } else {
13849 LOGH("already post processed frame");
13850 return false;
13851 }
13852 }
13853 return needRotationReprocess();
13854}
13855
13856/*===========================================================================
13857 * FUNCTION : needJpegExifRotation
13858 *
13859 * DESCRIPTION: if rotation from jpeg is needed
13860 *
13861 * PARAMETERS : none
13862 *
13863 * RETURN : true: needed
13864 * false: no need
13865 *==========================================================================*/
13866bool QCamera3HardwareInterface::needJpegExifRotation()
13867{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013868 /*If the pp does not have the ability to do rotation, enable jpeg rotation*/
Thierry Strudel3d639192016-09-09 11:52:26 -070013869 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13870 LOGD("Need use Jpeg EXIF Rotation");
13871 return true;
13872 }
13873 return false;
13874}
13875
13876/*===========================================================================
13877 * FUNCTION : addOfflineReprocChannel
13878 *
13879 * DESCRIPTION: add a reprocess channel that will do reprocess on frames
13880 * coming from input channel
13881 *
13882 * PARAMETERS :
13883 * @config : reprocess configuration
13884 * @inputChHandle : pointer to the input (source) channel
13885 *
13886 *
13887 * RETURN : Ptr to the newly created channel obj. NULL if failed.
13888 *==========================================================================*/
13889QCamera3ReprocessChannel *QCamera3HardwareInterface::addOfflineReprocChannel(
13890 const reprocess_config_t &config, QCamera3ProcessingChannel *inputChHandle)
13891{
13892 int32_t rc = NO_ERROR;
13893 QCamera3ReprocessChannel *pChannel = NULL;
13894
13895 pChannel = new QCamera3ReprocessChannel(mCameraHandle->camera_handle,
Thierry Strudelc2ee3302016-11-17 12:33:12 -080013896 mChannelHandle, mCameraHandle->ops, captureResultCb, setBufferErrorStatus,
13897 config.padding, CAM_QCOM_FEATURE_NONE, this, inputChHandle);
Thierry Strudel3d639192016-09-09 11:52:26 -070013898 if (NULL == pChannel) {
13899 LOGE("no mem for reprocess channel");
13900 return NULL;
13901 }
13902
13903 rc = pChannel->initialize(IS_TYPE_NONE);
13904 if (rc != NO_ERROR) {
13905 LOGE("init reprocess channel failed, ret = %d", rc);
13906 delete pChannel;
13907 return NULL;
13908 }
13909
13910 // pp feature config
13911 cam_pp_feature_config_t pp_config;
13912 memset(&pp_config, 0, sizeof(cam_pp_feature_config_t));
13913
13914 pp_config.feature_mask |= CAM_QCOM_FEATURE_PP_SUPERSET_HAL3;
13915 if (gCamCapability[mCameraId]->qcom_supported_feature_mask
13916 & CAM_QCOM_FEATURE_DSDN) {
13917        // Use CPP CDS in case h/w supports it.
13918 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_CDS;
13919 pp_config.feature_mask |= CAM_QCOM_FEATURE_DSDN;
13920 }
13921 if (!(gCamCapability[mCameraId]->qcom_supported_feature_mask & CAM_QCOM_FEATURE_ROTATION)) {
13922 pp_config.feature_mask &= ~CAM_QCOM_FEATURE_ROTATION;
13923 }
13924
Thierry Strudele80ad7c2016-12-06 10:16:27 -080013925 if (config.hdr_param.hdr_enable) {
13926 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13927 pp_config.hdr_param = config.hdr_param;
13928 }
13929
13930 if (mForceHdrSnapshot) {
13931 pp_config.feature_mask |= CAM_QCOM_FEATURE_HDR;
13932 pp_config.hdr_param.hdr_enable = 1;
13933 pp_config.hdr_param.hdr_need_1x = 0;
13934 pp_config.hdr_param.hdr_mode = CAM_HDR_MODE_MULTIFRAME;
13935 }
13936
Thierry Strudel3d639192016-09-09 11:52:26 -070013937 rc = pChannel->addReprocStreamsFromSource(pp_config,
13938 config,
13939 IS_TYPE_NONE,
13940 mMetadataChannel);
13941
13942 if (rc != NO_ERROR) {
13943 delete pChannel;
13944 return NULL;
13945 }
13946 return pChannel;
13947}
13948
13949/*===========================================================================
13950 * FUNCTION : getMobicatMask
13951 *
13952 * DESCRIPTION: returns mobicat mask
13953 *
13954 * PARAMETERS : none
13955 *
13956 * RETURN : mobicat mask
13957 *
13958 *==========================================================================*/
13959uint8_t QCamera3HardwareInterface::getMobicatMask()
13960{
13961 return m_MobicatMask;
13962}
13963
13964/*===========================================================================
13965 * FUNCTION : setMobicat
13966 *
13967 * DESCRIPTION: set Mobicat on/off.
13968 *
13969 * PARAMETERS :
13970 * @params : none
13971 *
13972 * RETURN : int32_t type of status
13973 * NO_ERROR -- success
13974 * non-zero failure code
13975 *==========================================================================*/
13976int32_t QCamera3HardwareInterface::setMobicat()
13977{
Thierry Strudel3d639192016-09-09 11:52:26 -070013978 int32_t ret = NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070013979
Shuzhen Wangb57ec912017-07-31 13:24:27 -070013980 if (m_MobicatMask) {
Thierry Strudel3d639192016-09-09 11:52:26 -070013981 tune_cmd_t tune_cmd;
13982 tune_cmd.type = SET_RELOAD_CHROMATIX;
13983 tune_cmd.module = MODULE_ALL;
13984 tune_cmd.value = TRUE;
13985 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13986 CAM_INTF_PARM_SET_VFE_COMMAND,
13987 tune_cmd);
13988
13989 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
13990 CAM_INTF_PARM_SET_PP_COMMAND,
13991 tune_cmd);
13992 }
Thierry Strudel3d639192016-09-09 11:52:26 -070013993
13994 return ret;
13995}
13996
13997/*===========================================================================
13998* FUNCTION : getLogLevel
13999*
14000* DESCRIPTION: Reads the log level property into a variable
14001*
14002* PARAMETERS :
14003* None
14004*
14005* RETURN :
14006* None
14007*==========================================================================*/
14008void QCamera3HardwareInterface::getLogLevel()
14009{
14010 char prop[PROPERTY_VALUE_MAX];
14011 uint32_t globalLogLevel = 0;
14012
14013 property_get("persist.camera.hal.debug", prop, "0");
14014 int val = atoi(prop);
14015 if (0 <= val) {
14016 gCamHal3LogLevel = (uint32_t)val;
14017 }
14018
Thierry Strudel9ec39c62016-12-28 11:30:05 -080014019 property_get("persist.camera.kpi.debug", prop, "0");
Thierry Strudel3d639192016-09-09 11:52:26 -070014020 gKpiDebugLevel = atoi(prop);
14021
14022 property_get("persist.camera.global.debug", prop, "0");
14023 val = atoi(prop);
14024 if (0 <= val) {
14025 globalLogLevel = (uint32_t)val;
14026 }
14027
14028 /* Highest log level among hal.logs and global.logs is selected */
14029 if (gCamHal3LogLevel < globalLogLevel)
14030 gCamHal3LogLevel = globalLogLevel;
14031
14032 return;
14033}
14034
14035/*===========================================================================
14036 * FUNCTION : validateStreamRotations
14037 *
14038 * DESCRIPTION: Check if the rotations requested are supported
14039 *
14040 * PARAMETERS :
14041 * @stream_list : streams to be configured
14042 *
14043 * RETURN : NO_ERROR on success
14044 * -EINVAL on failure
14045 *
14046 *==========================================================================*/
14047int QCamera3HardwareInterface::validateStreamRotations(
14048 camera3_stream_configuration_t *streamList)
14049{
14050 int rc = NO_ERROR;
14051
14052 /*
14053 * Loop through all streams requested in configuration
14054 * Check if unsupported rotations have been requested on any of them
14055 */
14056 for (size_t j = 0; j < streamList->num_streams; j++){
14057 camera3_stream_t *newStream = streamList->streams[j];
14058
Emilian Peev35ceeed2017-06-29 11:58:56 -070014059 switch(newStream->rotation) {
14060 case CAMERA3_STREAM_ROTATION_0:
14061 case CAMERA3_STREAM_ROTATION_90:
14062 case CAMERA3_STREAM_ROTATION_180:
14063 case CAMERA3_STREAM_ROTATION_270:
14064 //Expected values
14065 break;
14066 default:
14067                ALOGE("%s: Error: Unsupported rotation of %d requested for stream "
14068 "type:%d and stream format:%d", __func__,
14069 newStream->rotation, newStream->stream_type,
14070 newStream->format);
14071 return -EINVAL;
14072 }
14073
Thierry Strudel3d639192016-09-09 11:52:26 -070014074 bool isRotated = (newStream->rotation != CAMERA3_STREAM_ROTATION_0);
14075 bool isImplDef = (newStream->format ==
14076 HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED);
14077 bool isZsl = (newStream->stream_type == CAMERA3_STREAM_BIDIRECTIONAL &&
14078 isImplDef);
14079
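        // Rotation is only supported on IMPLEMENTATION_DEFINED output streams;
        // rotated ZSL (bidirectional) streams are rejected.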
14080 if (isRotated && (!isImplDef || isZsl)) {
14081            LOGE("Error: Unsupported rotation of %d requested for stream "
14082 "type:%d and stream format:%d",
14083 newStream->rotation, newStream->stream_type,
14084 newStream->format);
14085 rc = -EINVAL;
14086 break;
14087 }
14088 }
14089
14090 return rc;
14091}
14092
14093/*===========================================================================
14094* FUNCTION : getFlashInfo
14095*
14096* DESCRIPTION: Retrieve information about whether the device has a flash.
14097*
14098* PARAMETERS :
14099* @cameraId : Camera id to query
14100* @hasFlash : Boolean indicating whether there is a flash device
14101* associated with given camera
14102* @flashNode : If a flash device exists, this will be its device node.
14103*
14104* RETURN :
14105* None
14106*==========================================================================*/
14107void QCamera3HardwareInterface::getFlashInfo(const int cameraId,
14108 bool& hasFlash,
14109 char (&flashNode)[QCAMERA_MAX_FILEPATH_LENGTH])
14110{
14111 cam_capability_t* camCapability = gCamCapability[cameraId];
14112 if (NULL == camCapability) {
14113 hasFlash = false;
14114 flashNode[0] = '\0';
14115 } else {
14116 hasFlash = camCapability->flash_available;
14117 strlcpy(flashNode,
14118 (char*)camCapability->flash_dev_name,
14119 QCAMERA_MAX_FILEPATH_LENGTH);
14120 }
14121}
14122
14123/*===========================================================================
14124* FUNCTION : getEepromVersionInfo
14125*
14126* DESCRIPTION: Retrieve version info of the sensor EEPROM data
14127*
14128* PARAMETERS : None
14129*
14130* RETURN : string describing EEPROM version
14131* "\0" if no such info available
14132*==========================================================================*/
14133const char *QCamera3HardwareInterface::getEepromVersionInfo()
14134{
14135 return (const char *)&gCamCapability[mCameraId]->eeprom_version_info[0];
14136}
14137
14138/*===========================================================================
14139* FUNCTION : getLdafCalib
14140*
14141* DESCRIPTION: Retrieve Laser AF calibration data
14142*
14143* PARAMETERS : None
14144*
14145* RETURN : Two uint32_t describing laser AF calibration data
14146* NULL if none is available.
14147*==========================================================================*/
14148const uint32_t *QCamera3HardwareInterface::getLdafCalib()
14149{
14150 if (mLdafCalibExist) {
14151 return &mLdafCalib[0];
14152 } else {
14153 return NULL;
14154 }
14155}
14156
14157/*===========================================================================
Arnd Geis082a4d72017-08-24 10:33:07 -070014158* FUNCTION : getEaselFwVersion
14159*
14160* DESCRIPTION: Retrieve Easel firmware version
14161*
14162* PARAMETERS : None
14163*
14164* RETURN : string describing Firmware version
Arnd Geis8cbfc182017-09-07 14:46:41 -070014165* "\0" if version is not up to date
Arnd Geis082a4d72017-08-24 10:33:07 -070014166*==========================================================================*/
14167const char *QCamera3HardwareInterface::getEaselFwVersion()
14168{
Arnd Geis8cbfc182017-09-07 14:46:41 -070014169 if (mEaselFwUpdated) {
14170 return (const char *)&mEaselFwVersion[0];
14171 } else {
14172 return NULL;
Arnd Geis082a4d72017-08-24 10:33:07 -070014173 }
Arnd Geis082a4d72017-08-24 10:33:07 -070014174}
14175
14176/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014177 * FUNCTION : dynamicUpdateMetaStreamInfo
14178 *
14179 * DESCRIPTION: This function:
14180 * (1) stops all the channels
14181 * (2) returns error on pending requests and buffers
14182 * (3) sends metastream_info in setparams
14183 * (4) starts all channels
14184 * This is useful when sensor has to be restarted to apply any
14185 * settings such as frame rate from a different sensor mode
14186 *
14187 * PARAMETERS : None
14188 *
14189 * RETURN : NO_ERROR on success
14190 * Error codes on failure
14191 *
14192 *==========================================================================*/
14193int32_t QCamera3HardwareInterface::dynamicUpdateMetaStreamInfo()
14194{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014195 ATRACE_CAMSCOPE_CALL(CAMSCOPE_HAL3_DYN_UPDATE_META_STRM_INFO);
Thierry Strudel3d639192016-09-09 11:52:26 -070014196 int rc = NO_ERROR;
14197
14198 LOGD("E");
14199
14200 rc = stopAllChannels();
14201 if (rc < 0) {
14202 LOGE("stopAllChannels failed");
14203 return rc;
14204 }
14205
14206 rc = notifyErrorForPendingRequests();
14207 if (rc < 0) {
14208 LOGE("notifyErrorForPendingRequests failed");
14209 return rc;
14210 }
14211
14212 for (uint32_t i = 0; i < mStreamConfigInfo.num_streams; i++) {
14213        LOGI("STREAM INFO : type %d, wxh: %d x %d, pp_mask: 0x%x "
14214 "Format:%d",
14215 mStreamConfigInfo.type[i],
14216 mStreamConfigInfo.stream_sizes[i].width,
14217 mStreamConfigInfo.stream_sizes[i].height,
14218 mStreamConfigInfo.postprocess_mask[i],
14219 mStreamConfigInfo.format[i]);
14220 }
14221
14222 /* Send meta stream info once again so that ISP can start */
14223 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters,
14224 CAM_INTF_META_STREAM_INFO, mStreamConfigInfo);
14225 rc = mCameraHandle->ops->set_parms(mCameraHandle->camera_handle,
14226 mParameters);
14227 if (rc < 0) {
14228 LOGE("set Metastreaminfo failed. Sensor mode does not change");
14229 }
14230
14231 rc = startAllChannels();
14232 if (rc < 0) {
14233 LOGE("startAllChannels failed");
14234 return rc;
14235 }
14236
14237 LOGD("X");
14238 return rc;
14239}
14240
14241/*===========================================================================
14242 * FUNCTION : stopAllChannels
14243 *
14244 * DESCRIPTION: This function stops (equivalent to stream-off) all channels
14245 *
14246 * PARAMETERS : None
14247 *
14248 * RETURN : NO_ERROR on success
14249 * Error codes on failure
14250 *
14251 *==========================================================================*/
14252int32_t QCamera3HardwareInterface::stopAllChannels()
14253{
14254 int32_t rc = NO_ERROR;
14255
14256 LOGD("Stopping all channels");
14257 // Stop the Streams/Channels
14258 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14259 it != mStreamInfo.end(); it++) {
14260 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14261 if (channel) {
14262 channel->stop();
14263 }
14264 (*it)->status = INVALID;
14265 }
14266
14267 if (mSupportChannel) {
14268 mSupportChannel->stop();
14269 }
14270 if (mAnalysisChannel) {
14271 mAnalysisChannel->stop();
14272 }
14273 if (mRawDumpChannel) {
14274 mRawDumpChannel->stop();
14275 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014276 if (mHdrPlusRawSrcChannel) {
14277 mHdrPlusRawSrcChannel->stop();
14278 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014279 if (mMetadataChannel) {
 14280 /* If mStreamInfo is not empty, there is a metadata stream */
14281 mMetadataChannel->stop();
14282 }
14283
14284 LOGD("All channels stopped");
14285 return rc;
14286}
14287
14288/*===========================================================================
14289 * FUNCTION : startAllChannels
14290 *
14291 * DESCRIPTION: This function starts (equivalent to stream-on) all channels
14292 *
14293 * PARAMETERS : None
14294 *
14295 * RETURN : NO_ERROR on success
14296 * Error codes on failure
14297 *
14298 *==========================================================================*/
14299int32_t QCamera3HardwareInterface::startAllChannels()
14300{
14301 int32_t rc = NO_ERROR;
14302
14303 LOGD("Start all channels ");
14304 // Start the Streams/Channels
14305 if (mMetadataChannel) {
 14306 /* If mStreamInfo is not empty, there is a metadata stream */
14307 rc = mMetadataChannel->start();
14308 if (rc < 0) {
14309 LOGE("META channel start failed");
14310 return rc;
14311 }
14312 }
14313 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14314 it != mStreamInfo.end(); it++) {
14315 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14316 if (channel) {
14317 rc = channel->start();
14318 if (rc < 0) {
14319 LOGE("channel start failed");
14320 return rc;
14321 }
14322 }
14323 }
14324 if (mAnalysisChannel) {
14325 mAnalysisChannel->start();
14326 }
14327 if (mSupportChannel) {
14328 rc = mSupportChannel->start();
14329 if (rc < 0) {
14330 LOGE("Support channel start failed");
14331 return rc;
14332 }
14333 }
14334 if (mRawDumpChannel) {
14335 rc = mRawDumpChannel->start();
14336 if (rc < 0) {
14337 LOGE("RAW dump channel start failed");
14338 return rc;
14339 }
14340 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014341 if (mHdrPlusRawSrcChannel) {
14342 rc = mHdrPlusRawSrcChannel->start();
14343 if (rc < 0) {
14344 LOGE("HDR+ RAW channel start failed");
14345 return rc;
14346 }
14347 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014348
14349 LOGD("All channels started");
14350 return rc;
14351}
14352
14353/*===========================================================================
14354 * FUNCTION : notifyErrorForPendingRequests
14355 *
 14356 * DESCRIPTION: This function sends errors for all the pending requests/buffers
14357 *
14358 * PARAMETERS : None
14359 *
14360 * RETURN : Error codes
14361 * NO_ERROR on success
14362 *
14363 *==========================================================================*/
14364int32_t QCamera3HardwareInterface::notifyErrorForPendingRequests()
14365{
Emilian Peev7650c122017-01-19 08:24:33 -080014366 notifyErrorFoPendingDepthData(mDepthChannel);
14367
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014368 auto pendingRequest = mPendingRequestsList.begin();
14369 auto pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.begin();
Thierry Strudel3d639192016-09-09 11:52:26 -070014370
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014371 // Iterate through pending requests (for which result metadata isn't sent yet) and pending
14372 // buffers (for which buffers aren't sent yet).
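 // Both lists are ordered by frame number, so this walks them like a merge:
 //  - buffer entry only (metadata already sent)      -> notify buffer error
 //  - request entry only (buffers already returned)  -> notify result error
 //  - both entries pending                            -> notify request error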
14373 while (pendingRequest != mPendingRequestsList.end() ||
14374 pendingBuffer != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
14375 if (pendingRequest == mPendingRequestsList.end() ||
14376 pendingBuffer->frame_number < pendingRequest->frame_number) {
 14377 // If metadata for this frame was already sent, notify about a buffer error and return
 14378 // the buffers with an error status.
14379 for (auto &info : pendingBuffer->mPendingBufferList) {
14380 // Send a buffer error for this frame number.
Thierry Strudel3d639192016-09-09 11:52:26 -070014381 camera3_notify_msg_t notify_msg;
14382 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14383 notify_msg.type = CAMERA3_MSG_ERROR;
14384 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014385 notify_msg.message.error.error_stream = info.stream;
14386 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014387 orchestrateNotify(&notify_msg);
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014388
14389 camera3_stream_buffer_t buffer = {};
14390 buffer.acquire_fence = -1;
14391 buffer.release_fence = -1;
14392 buffer.buffer = info.buffer;
14393 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14394 buffer.stream = info.stream;
14395 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
Thierry Strudel3d639192016-09-09 11:52:26 -070014396 }
14397
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014398 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14399 } else if (pendingBuffer == mPendingBuffersMap.mPendingBuffersInRequest.end() ||
14400 pendingBuffer->frame_number > pendingRequest->frame_number) {
14401 // If the buffers for this frame were sent already, notify about a result error.
Thierry Strudel3d639192016-09-09 11:52:26 -070014402 camera3_notify_msg_t notify_msg;
14403 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14404 notify_msg.type = CAMERA3_MSG_ERROR;
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014405 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_RESULT;
14406 notify_msg.message.error.error_stream = nullptr;
14407 notify_msg.message.error.frame_number = pendingRequest->frame_number;
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014408 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014409
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014410 if (pendingRequest->input_buffer != nullptr) {
14411 camera3_capture_result result = {};
14412 result.frame_number = pendingRequest->frame_number;
14413 result.result = nullptr;
14414 result.input_buffer = pendingRequest->input_buffer;
14415 orchestrateResult(&result);
Thierry Strudel3d639192016-09-09 11:52:26 -070014416 }
14417
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014418 mShutterDispatcher.clear(pendingRequest->frame_number);
14419 pendingRequest = mPendingRequestsList.erase(pendingRequest);
14420 } else {
14421 // If both buffers and result metadata weren't sent yet, notify about a request error
14422 // and return buffers with error.
14423 for (auto &info : pendingBuffer->mPendingBufferList) {
14424 camera3_notify_msg_t notify_msg;
14425 memset(&notify_msg, 0, sizeof(camera3_notify_msg_t));
14426 notify_msg.type = CAMERA3_MSG_ERROR;
14427 notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_REQUEST;
14428 notify_msg.message.error.error_stream = info.stream;
14429 notify_msg.message.error.frame_number = pendingBuffer->frame_number;
14430 orchestrateNotify(&notify_msg);
Thierry Strudel3d639192016-09-09 11:52:26 -070014431
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014432 camera3_stream_buffer_t buffer = {};
14433 buffer.acquire_fence = -1;
14434 buffer.release_fence = -1;
14435 buffer.buffer = info.buffer;
14436 buffer.status = CAMERA3_BUFFER_STATUS_ERROR;
14437 buffer.stream = info.stream;
14438 mOutputBufferDispatcher.markBufferReady(pendingBuffer->frame_number, buffer);
14439 }
14440
14441 if (pendingRequest->input_buffer != nullptr) {
14442 camera3_capture_result result = {};
14443 result.frame_number = pendingRequest->frame_number;
14444 result.result = nullptr;
14445 result.input_buffer = pendingRequest->input_buffer;
14446 orchestrateResult(&result);
14447 }
14448
14449 mShutterDispatcher.clear(pendingRequest->frame_number);
14450 pendingBuffer = mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffer);
14451 pendingRequest = mPendingRequestsList.erase(pendingRequest);
Thierry Strudel3d639192016-09-09 11:52:26 -070014452 }
14453 }
14454
14455 /* Reset pending frame Drop list and requests list */
14456 mPendingFrameDropList.clear();
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014457 mShutterDispatcher.clear();
14458 mOutputBufferDispatcher.clear(/*clearConfiguredStreams*/false);
Thierry Strudel3d639192016-09-09 11:52:26 -070014459 mPendingBuffersMap.mPendingBuffersInRequest.clear();
Emilian Peev30522a12017-08-03 14:36:33 +010014460 mExpectedFrameDuration = 0;
14461 mExpectedInflightDuration = 0;
Thierry Strudel3d639192016-09-09 11:52:26 -070014462 LOGH("Cleared all the pending buffers ");
14463
Chien-Yu Chen3f303522017-05-19 15:21:45 -070014464 return NO_ERROR;
Thierry Strudel3d639192016-09-09 11:52:26 -070014465}
14466
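/*===========================================================================
 * FUNCTION   : isOnEncoder
 *
 * DESCRIPTION: Check whether a stream of the given dimensions is treated as an
 *              encoder (snapshot) stream, i.e. it exceeds the maximum
 *              viewfinder size or is larger than 4K in either dimension.
 *
 * PARAMETERS : @max_viewfinder_size: maximum supported viewfinder dimensions
 *              @width : stream width
 *              @height: stream height
 *
 * RETURN     : true if the stream belongs on the encoder path
 *              false otherwise
 *==========================================================================*/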
14467bool QCamera3HardwareInterface::isOnEncoder(
14468 const cam_dimension_t max_viewfinder_size,
14469 uint32_t width, uint32_t height)
14470{
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014471 return ((width > (uint32_t)max_viewfinder_size.width) ||
14472 (height > (uint32_t)max_viewfinder_size.height) ||
14473 (width > (uint32_t)VIDEO_4K_WIDTH) ||
14474 (height > (uint32_t)VIDEO_4K_HEIGHT));
Thierry Strudel3d639192016-09-09 11:52:26 -070014475}
14476
14477/*===========================================================================
14478 * FUNCTION : setBundleInfo
14479 *
 14480 * DESCRIPTION: Set bundle info for all streams that are bundled.
14481 *
14482 * PARAMETERS : None
14483 *
14484 * RETURN : NO_ERROR on success
14485 * Error codes on failure
14486 *==========================================================================*/
14487int32_t QCamera3HardwareInterface::setBundleInfo()
14488{
14489 int32_t rc = NO_ERROR;
14490
14491 if (mChannelHandle) {
14492 cam_bundle_config_t bundleInfo;
14493 memset(&bundleInfo, 0, sizeof(bundleInfo));
14494 rc = mCameraHandle->ops->get_bundle_info(
14495 mCameraHandle->camera_handle, mChannelHandle, &bundleInfo);
14496 if (rc != NO_ERROR) {
14497 LOGE("get_bundle_info failed");
14498 return rc;
14499 }
14500 if (mAnalysisChannel) {
14501 mAnalysisChannel->setBundleInfo(bundleInfo);
14502 }
14503 if (mSupportChannel) {
14504 mSupportChannel->setBundleInfo(bundleInfo);
14505 }
14506 for (List<stream_info_t *>::iterator it = mStreamInfo.begin();
14507 it != mStreamInfo.end(); it++) {
14508 QCamera3Channel *channel = (QCamera3Channel *)(*it)->stream->priv;
14509 channel->setBundleInfo(bundleInfo);
14510 }
14511 if (mRawDumpChannel) {
14512 mRawDumpChannel->setBundleInfo(bundleInfo);
14513 }
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014514 if (mHdrPlusRawSrcChannel) {
14515 mHdrPlusRawSrcChannel->setBundleInfo(bundleInfo);
14516 }
Thierry Strudel3d639192016-09-09 11:52:26 -070014517 }
14518
14519 return rc;
14520}
14521
14522/*===========================================================================
Thierry Strudel295a0ca2016-11-03 18:38:47 -070014523 * FUNCTION : setInstantAEC
14524 *
14525 * DESCRIPTION: Set Instant AEC related params.
14526 *
14527 * PARAMETERS :
14528 * @meta: CameraMetadata reference
14529 *
14530 * RETURN : NO_ERROR on success
14531 * Error codes on failure
14532 *==========================================================================*/
14533int32_t QCamera3HardwareInterface::setInstantAEC(const CameraMetadata &meta)
14534{
14535 int32_t rc = NO_ERROR;
14536 uint8_t val = 0;
14537 char prop[PROPERTY_VALUE_MAX];
14538
14539 // First try to configure instant AEC from framework metadata
14540 if (meta.exists(QCAMERA3_INSTANT_AEC_MODE)) {
14541 val = (uint8_t)meta.find(QCAMERA3_INSTANT_AEC_MODE).data.i32[0];
14542 }
14543
14544 // If framework did not set this value, try to read from set prop.
14545 if (val == 0) {
14546 memset(prop, 0, sizeof(prop));
14547 property_get("persist.camera.instant.aec", prop, "0");
14548 val = (uint8_t)atoi(prop);
14549 }
14550
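 // Accepted values follow the cam_aec_convergence_type enum; the
 // persist.camera.instant.aec property is consulted only when the framework
 // does not provide QCAMERA3_INSTANT_AEC_MODE (or sets it to 0).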
14551 if ((val >= (uint8_t)CAM_AEC_NORMAL_CONVERGENCE) &&
14552 ( val < (uint8_t)CAM_AEC_CONVERGENCE_MAX)) {
14553 ADD_SET_PARAM_ENTRY_TO_BATCH(mParameters, CAM_INTF_PARM_INSTANT_AEC, val);
14554 mInstantAEC = val;
14555 mInstantAECSettledFrameNumber = 0;
14556 mInstantAecFrameIdxCount = 0;
14557 LOGH("instantAEC value set %d",val);
14558 if (mInstantAEC) {
14559 memset(prop, 0, sizeof(prop));
14560 property_get("persist.camera.ae.instant.bound", prop, "10");
14561 int32_t aec_frame_skip_cnt = atoi(prop);
14562 if (aec_frame_skip_cnt >= 0) {
14563 mAecSkipDisplayFrameBound = (uint8_t)aec_frame_skip_cnt;
14564 } else {
14565 LOGE("Invalid prop for aec frame bound %d", aec_frame_skip_cnt);
14566 rc = BAD_VALUE;
14567 }
14568 }
14569 } else {
14570 LOGE("Bad instant aec value set %d", val);
14571 rc = BAD_VALUE;
14572 }
14573 return rc;
14574}
14575
14576/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014577 * FUNCTION : get_num_overall_buffers
14578 *
14579 * DESCRIPTION: Estimate number of pending buffers across all requests.
14580 *
14581 * PARAMETERS : None
14582 *
14583 * RETURN : Number of overall pending buffers
14584 *
14585 *==========================================================================*/
14586uint32_t PendingBuffersMap::get_num_overall_buffers()
14587{
14588 uint32_t sum_buffers = 0;
14589 for (auto &req : mPendingBuffersInRequest) {
14590 sum_buffers += req.mPendingBufferList.size();
14591 }
14592 return sum_buffers;
14593}
14594
14595/*===========================================================================
14596 * FUNCTION : removeBuf
14597 *
14598 * DESCRIPTION: Remove a matching buffer from tracker.
14599 *
 14600 * PARAMETERS : @buffer: buffer handle to remove from the tracker
14601 *
14602 * RETURN : None
14603 *
14604 *==========================================================================*/
14605void PendingBuffersMap::removeBuf(buffer_handle_t *buffer)
14606{
14607 bool buffer_found = false;
14608 for (auto req = mPendingBuffersInRequest.begin();
14609 req != mPendingBuffersInRequest.end(); req++) {
14610 for (auto k = req->mPendingBufferList.begin();
14611 k != req->mPendingBufferList.end(); k++ ) {
14612 if (k->buffer == buffer) {
14613 LOGD("Frame %d: Found Frame buffer %p, take it out from mPendingBufferList",
14614 req->frame_number, buffer);
14615 k = req->mPendingBufferList.erase(k);
14616 if (req->mPendingBufferList.empty()) {
14617 // Remove this request from Map
14618 req = mPendingBuffersInRequest.erase(req);
14619 }
14620 buffer_found = true;
14621 break;
14622 }
14623 }
14624 if (buffer_found) {
14625 break;
14626 }
14627 }
14628 LOGD("mPendingBuffersMap.num_overall_buffers = %d",
14629 get_num_overall_buffers());
14630}
14631
14632/*===========================================================================
Thierry Strudelc2ee3302016-11-17 12:33:12 -080014633 * FUNCTION : getBufErrStatus
14634 *
14635 * DESCRIPTION: get buffer error status
14636 *
14637 * PARAMETERS : @buffer: buffer handle
14638 *
14639 * RETURN : Error status
14640 *
14641 *==========================================================================*/
14642int32_t PendingBuffersMap::getBufErrStatus(buffer_handle_t *buffer)
14643{
14644 for (auto& req : mPendingBuffersInRequest) {
14645 for (auto& k : req.mPendingBufferList) {
14646 if (k.buffer == buffer)
14647 return k.bufStatus;
14648 }
14649 }
14650 return CAMERA3_BUFFER_STATUS_OK;
14651}
14652
14653/*===========================================================================
Thierry Strudel3d639192016-09-09 11:52:26 -070014654 * FUNCTION : setPAAFSupport
14655 *
14656 * DESCRIPTION: Set the preview-assisted auto focus support bit in
14657 * feature mask according to stream type and filter
14658 * arrangement
14659 *
14660 * PARAMETERS : @feature_mask: current feature mask, which may be modified
14661 * @stream_type: stream type
14662 * @filter_arrangement: filter arrangement
14663 *
14664 * RETURN : None
14665 *==========================================================================*/
14666void QCamera3HardwareInterface::setPAAFSupport(
14667 cam_feature_mask_t& feature_mask,
14668 cam_stream_type_t stream_type,
14669 cam_color_filter_arrangement_t filter_arrangement)
14670{
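 // Preview-assisted AF is enabled on the processed preview, analysis and
 // video streams for Bayer sensors, and only on the analysis stream for
 // mono (Y-only) sensors.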
Thierry Strudel3d639192016-09-09 11:52:26 -070014671 switch (filter_arrangement) {
14672 case CAM_FILTER_ARRANGEMENT_RGGB:
14673 case CAM_FILTER_ARRANGEMENT_GRBG:
14674 case CAM_FILTER_ARRANGEMENT_GBRG:
14675 case CAM_FILTER_ARRANGEMENT_BGGR:
Thierry Strudele80ad7c2016-12-06 10:16:27 -080014676 if ((stream_type == CAM_STREAM_TYPE_PREVIEW) ||
14677 (stream_type == CAM_STREAM_TYPE_ANALYSIS) ||
Thierry Strudel3d639192016-09-09 11:52:26 -070014678 (stream_type == CAM_STREAM_TYPE_VIDEO)) {
Thierry Strudel2896d122017-02-23 19:18:03 -080014679 if (!(feature_mask & CAM_QTI_FEATURE_PPEISCORE))
14680 feature_mask |= CAM_QCOM_FEATURE_PAAF;
Thierry Strudel3d639192016-09-09 11:52:26 -070014681 }
14682 break;
14683 case CAM_FILTER_ARRANGEMENT_Y:
14684 if (stream_type == CAM_STREAM_TYPE_ANALYSIS) {
14685 feature_mask |= CAM_QCOM_FEATURE_PAAF;
14686 }
14687 break;
14688 default:
14689 break;
14690 }
Shuzhen Wang3b457d92016-08-03 08:46:59 -070014691 LOGD("feature_mask=0x%llx; stream_type=%d, filter_arrangement=%d",
14692 feature_mask, stream_type, filter_arrangement);
14693
14694
Thierry Strudel3d639192016-09-09 11:52:26 -070014695}
14696
14697/*===========================================================================
14698* FUNCTION : getSensorMountAngle
14699*
14700* DESCRIPTION: Retrieve sensor mount angle
14701*
14702* PARAMETERS : None
14703*
14704* RETURN : sensor mount angle in uint32_t
14705*==========================================================================*/
14706uint32_t QCamera3HardwareInterface::getSensorMountAngle()
14707{
14708 return gCamCapability[mCameraId]->sensor_mount_angle;
14709}
14710
14711/*===========================================================================
14712* FUNCTION : getRelatedCalibrationData
14713*
14714* DESCRIPTION: Retrieve related system calibration data
14715*
14716* PARAMETERS : None
14717*
14718* RETURN : Pointer of related system calibration data
14719*==========================================================================*/
14720const cam_related_system_calibration_data_t *QCamera3HardwareInterface::getRelatedCalibrationData()
14721{
14722 return (const cam_related_system_calibration_data_t *)
14723 &(gCamCapability[mCameraId]->related_cam_calibration);
14724}
Shuzhen Wangf6890e02016-08-12 14:28:54 -070014725
14726/*===========================================================================
14727 * FUNCTION : is60HzZone
14728 *
 14729 * DESCRIPTION: Whether the device is in a region with 60Hz mains electricity frequency
14730 *
14731 * PARAMETERS : None
14732 *
14733 * RETURN : True if in 60Hz zone, False otherwise
14734 *==========================================================================*/
14735bool QCamera3HardwareInterface::is60HzZone()
14736{
14737 time_t t = time(NULL);
14738 struct tm lt;
14739
14740 struct tm* r = localtime_r(&t, &lt);
14741
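 // Heuristic based purely on the local UTC offset: offsets at or below -2h or
 // at or above +8h are treated as 60Hz regions, everything in between as 50Hz.
 // If local time cannot be determined, default to 60Hz.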
 14742 return (r == NULL || lt.tm_gmtoff <= -2*60*60 || lt.tm_gmtoff >= 8*60*60);
14746}
Shuzhen Wanga5da1022016-07-13 20:18:42 -070014747
14748/*===========================================================================
14749 * FUNCTION : adjustBlackLevelForCFA
14750 *
 14751 * DESCRIPTION: Reorder the black level pattern from RGGB order to the order of
 14752 * the sensor's Bayer CFA (Color Filter Array).
14753 *
14754 * PARAMETERS : @input: black level pattern in the order of RGGB
14755 * @output: black level pattern in the order of CFA
14756 * @color_arrangement: CFA color arrangement
14757 *
14758 * RETURN : None
14759 *==========================================================================*/
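// Illustrative example: with an RGGB-ordered input {R, Gr, Gb, B} and a GRBG
// sensor, the output becomes {Gr, R, B, Gb}, i.e. output[i] holds the black
// level of the color at CFA position i.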
14760template<typename T>
14761void QCamera3HardwareInterface::adjustBlackLevelForCFA(
14762 T input[BLACK_LEVEL_PATTERN_CNT],
14763 T output[BLACK_LEVEL_PATTERN_CNT],
14764 cam_color_filter_arrangement_t color_arrangement)
14765{
14766 switch (color_arrangement) {
14767 case CAM_FILTER_ARRANGEMENT_GRBG:
14768 output[0] = input[1];
14769 output[1] = input[0];
14770 output[2] = input[3];
14771 output[3] = input[2];
14772 break;
14773 case CAM_FILTER_ARRANGEMENT_GBRG:
14774 output[0] = input[2];
14775 output[1] = input[3];
14776 output[2] = input[0];
14777 output[3] = input[1];
14778 break;
14779 case CAM_FILTER_ARRANGEMENT_BGGR:
14780 output[0] = input[3];
14781 output[1] = input[2];
14782 output[2] = input[1];
14783 output[3] = input[0];
14784 break;
14785 case CAM_FILTER_ARRANGEMENT_RGGB:
14786 output[0] = input[0];
14787 output[1] = input[1];
14788 output[2] = input[2];
14789 output[3] = input[3];
14790 break;
14791 default:
14792 LOGE("Invalid color arrangement to derive dynamic blacklevel");
14793 break;
14794 }
14795}
Chien-Yu Chen8e599492016-11-01 13:37:46 -070014796
Chien-Yu Chen92724a82017-01-06 11:50:30 -080014797void QCamera3HardwareInterface::updateHdrPlusResultMetadata(
14798 CameraMetadata &resultMetadata,
14799 std::shared_ptr<metadata_buffer_t> settings)
14800{
14801 if (settings == nullptr) {
14802 ALOGE("%s: settings is nullptr.", __FUNCTION__);
14803 return;
14804 }
14805
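 // Copy the JPEG, GPS, thumbnail and capture-intent settings of the HDR+
 // still-capture request into the result metadata, since the result metadata
 // comes from a ZSL buffer and does not carry these request settings.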
14806 IF_META_AVAILABLE(double, gps_coords, CAM_INTF_META_JPEG_GPS_COORDINATES, settings) {
14807 resultMetadata.update(ANDROID_JPEG_GPS_COORDINATES, gps_coords, 3);
14808 }
14809
14810 IF_META_AVAILABLE(uint8_t, gps_methods, CAM_INTF_META_JPEG_GPS_PROC_METHODS, settings) {
14811 String8 str((const char *)gps_methods);
14812 resultMetadata.update(ANDROID_JPEG_GPS_PROCESSING_METHOD, str);
14813 }
14814
14815 IF_META_AVAILABLE(int64_t, gps_timestamp, CAM_INTF_META_JPEG_GPS_TIMESTAMP, settings) {
14816 resultMetadata.update(ANDROID_JPEG_GPS_TIMESTAMP, gps_timestamp, 1);
14817 }
14818
14819 IF_META_AVAILABLE(int32_t, jpeg_orientation, CAM_INTF_META_JPEG_ORIENTATION, settings) {
14820 resultMetadata.update(ANDROID_JPEG_ORIENTATION, jpeg_orientation, 1);
14821 }
14822
14823 IF_META_AVAILABLE(uint32_t, jpeg_quality, CAM_INTF_META_JPEG_QUALITY, settings) {
14824 uint8_t fwk_jpeg_quality = static_cast<uint8_t>(*jpeg_quality);
14825 resultMetadata.update(ANDROID_JPEG_QUALITY, &fwk_jpeg_quality, 1);
14826 }
14827
14828 IF_META_AVAILABLE(uint32_t, thumb_quality, CAM_INTF_META_JPEG_THUMB_QUALITY, settings) {
14829 uint8_t fwk_thumb_quality = static_cast<uint8_t>(*thumb_quality);
14830 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_QUALITY, &fwk_thumb_quality, 1);
14831 }
14832
14833 IF_META_AVAILABLE(cam_dimension_t, thumb_size, CAM_INTF_META_JPEG_THUMB_SIZE, settings) {
14834 int32_t fwk_thumb_size[2];
14835 fwk_thumb_size[0] = thumb_size->width;
14836 fwk_thumb_size[1] = thumb_size->height;
14837 resultMetadata.update(ANDROID_JPEG_THUMBNAIL_SIZE, fwk_thumb_size, 2);
14838 }
14839
14840 IF_META_AVAILABLE(uint32_t, intent, CAM_INTF_META_CAPTURE_INTENT, settings) {
14841 uint8_t fwk_intent = intent[0];
14842 resultMetadata.update(ANDROID_CONTROL_CAPTURE_INTENT, &fwk_intent, 1);
14843 }
14844}
14845
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014846bool QCamera3HardwareInterface::isRequestHdrPlusCompatible(
14847 const camera3_capture_request_t &request, const CameraMetadata &metadata) {
Chien-Yu Chenec328c82017-08-30 16:41:08 -070014848 if (metadata.exists(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS) &&
14849 metadata.find(NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS).data.i32[0] == 1) {
14850 ALOGV("%s: NEXUS_EXPERIMENTAL_2017_DISABLE_HDRPLUS is 1", __FUNCTION__);
14851 return false;
14852 }
14853
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014854 if (!metadata.exists(ANDROID_NOISE_REDUCTION_MODE) ||
14855 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0] !=
14856 ANDROID_NOISE_REDUCTION_MODE_HIGH_QUALITY) {
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014857 ALOGV("%s: ANDROID_NOISE_REDUCTION_MODE is not HQ: %d", __FUNCTION__,
Chien-Yu Chenee335912017-02-09 17:53:20 -080014858 metadata.find(ANDROID_NOISE_REDUCTION_MODE).data.u8[0]);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014859 return false;
14860 }
14861
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014862 if (!metadata.exists(ANDROID_EDGE_MODE) ||
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014863 metadata.find(ANDROID_EDGE_MODE).data.u8[0] != ANDROID_EDGE_MODE_HIGH_QUALITY) {
14864 ALOGV("%s: ANDROID_EDGE_MODE is not HQ.", __FUNCTION__);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014865 return false;
14866 }
14867
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014868 if (!metadata.exists(ANDROID_COLOR_CORRECTION_ABERRATION_MODE) ||
14869 metadata.find(ANDROID_COLOR_CORRECTION_ABERRATION_MODE).data.u8[0] !=
14870 ANDROID_COLOR_CORRECTION_ABERRATION_MODE_HIGH_QUALITY) {
14871 ALOGV("%s: ANDROID_COLOR_CORRECTION_ABERRATION_MODE is not HQ.", __FUNCTION__);
14872 return false;
14873 }
14874
14875 if (!metadata.exists(ANDROID_CONTROL_AE_MODE) ||
14876 (metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] != ANDROID_CONTROL_AE_MODE_ON &&
14877 metadata.find(ANDROID_CONTROL_AE_MODE).data.u8[0] !=
14878 ANDROID_CONTROL_AE_MODE_ON_AUTO_FLASH)) {
14879 ALOGV("%s: ANDROID_CONTROL_AE_MODE is not ON or ON_AUTO_FLASH.", __FUNCTION__);
14880 return false;
14881 }
14882
14883 if (!metadata.exists(ANDROID_CONTROL_AWB_MODE) ||
14884 metadata.find(ANDROID_CONTROL_AWB_MODE).data.u8[0] != ANDROID_CONTROL_AWB_MODE_AUTO) {
14885 ALOGV("%s: ANDROID_CONTROL_AWB_MODE is not AUTO.", __FUNCTION__);
14886 return false;
14887 }
14888
14889 if (!metadata.exists(ANDROID_CONTROL_EFFECT_MODE) ||
14890 metadata.find(ANDROID_CONTROL_EFFECT_MODE).data.u8[0] !=
14891 ANDROID_CONTROL_EFFECT_MODE_OFF) {
14892 ALOGV("%s: ANDROID_CONTROL_EFFECT_MODE_OFF is not OFF.", __FUNCTION__);
14893 return false;
14894 }
14895
14896 if (!metadata.exists(ANDROID_CONTROL_MODE) ||
14897 (metadata.find(ANDROID_CONTROL_MODE).data.u8[0] != ANDROID_CONTROL_MODE_AUTO &&
14898 metadata.find(ANDROID_CONTROL_MODE).data.u8[0] !=
14899 ANDROID_CONTROL_MODE_USE_SCENE_MODE)) {
14900 ALOGV("%s: ANDROID_CONTROL_MODE is not AUTO or USE_SCENE_MODE.", __FUNCTION__);
14901 return false;
14902 }
14903
14904 // TODO (b/32585046): support non-ZSL.
14905 if (!metadata.exists(ANDROID_CONTROL_ENABLE_ZSL) ||
14906 metadata.find(ANDROID_CONTROL_ENABLE_ZSL).data.u8[0] != ANDROID_CONTROL_ENABLE_ZSL_TRUE) {
14907 ALOGV("%s: ANDROID_CONTROL_ENABLE_ZSL is not true.", __FUNCTION__);
14908 return false;
14909 }
14910
14911 // TODO (b/32586081): support flash.
14912 if (!metadata.exists(ANDROID_FLASH_MODE) ||
14913 metadata.find(ANDROID_FLASH_MODE).data.u8[0] != ANDROID_FLASH_MODE_OFF) {
14914 ALOGV("%s: ANDROID_FLASH_MODE is not OFF.", __FUNCTION__);
14915 return false;
14916 }
14917
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014918 if (!metadata.exists(ANDROID_TONEMAP_MODE) ||
14919 metadata.find(ANDROID_TONEMAP_MODE).data.u8[0] != ANDROID_TONEMAP_MODE_HIGH_QUALITY) {
14920 ALOGV("%s: ANDROID_TONEMAP_MODE is not HQ.", __FUNCTION__);
14921 return false;
14922 }
14923
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014924
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014925 // TODO (b/36693254, b/36690506): support other outputs.
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014926 if (!gEnableMultipleHdrplusOutputs && request.num_output_buffers != 1) {
14927 ALOGV("%s: Only support 1 output: %d", __FUNCTION__, request.num_output_buffers);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014928 return false;
14929 }
14930
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014931 switch (request.output_buffers[0].stream->format) {
14932 case HAL_PIXEL_FORMAT_BLOB:
14933 break;
14934 case HAL_PIXEL_FORMAT_YCbCr_420_888:
14935 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
14936 // TODO (b/36693254): Only support full size.
14937 if (!gEnableMultipleHdrplusOutputs) {
14938 if (static_cast<int>(request.output_buffers[0].stream->width) !=
14939 gCamCapability[mCameraId]->picture_sizes_tbl[0].width ||
14940 static_cast<int>(request.output_buffers[0].stream->height) !=
14941 gCamCapability[mCameraId]->picture_sizes_tbl[0].height) {
14942 ALOGV("%s: Only full size is supported.", __FUNCTION__);
14943 return false;
14944 }
14945 }
14946 break;
14947 default:
14948 ALOGV("%s: Not an HDR+ request: Only Jpeg and YUV output is supported.", __FUNCTION__);
14949 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14950 ALOGV("%s: output_buffers[%u]: %dx%d format %d", __FUNCTION__, i,
 14951 request.output_buffers[i].stream->width,
 14952 request.output_buffers[i].stream->height,
 14953 request.output_buffers[i].stream->format);
14954 }
14955 return false;
14956 }
14957
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014958 return true;
14959}
14960
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014961void QCamera3HardwareInterface::abortPendingHdrplusRequest(HdrPlusPendingRequest *hdrPlusRequest) {
14962 if (hdrPlusRequest == nullptr) return;
14963
14964 for (auto & outputBufferIter : hdrPlusRequest->outputBuffers) {
14965 // Find the stream for this buffer.
14966 for (auto streamInfo : mStreamInfo) {
14967 if (streamInfo->id == outputBufferIter.first) {
14968 if (streamInfo->channel == mPictureChannel) {
14969 // For picture channel, this buffer is internally allocated so return this
14970 // buffer to picture channel.
14971 mPictureChannel->returnYuvBuffer(outputBufferIter.second.get());
14972 } else {
14973 // Unregister this buffer for other channels.
14974 streamInfo->channel->unregisterBuffer(outputBufferIter.second.get());
14975 }
14976 break;
14977 }
14978 }
14979 }
14980
14981 hdrPlusRequest->outputBuffers.clear();
14982 hdrPlusRequest->frameworkOutputBuffers.clear();
14983}
14984
Chien-Yu Chenad9b6632017-08-22 19:09:23 -070014985bool QCamera3HardwareInterface::trySubmittingHdrPlusRequestLocked(
14986 HdrPlusPendingRequest *hdrPlusRequest, const camera3_capture_request_t &request,
14987 const CameraMetadata &metadata)
14988{
14989 if (hdrPlusRequest == nullptr) return false;
14990 if (!isRequestHdrPlusCompatible(request, metadata)) return false;
14991
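 // On success the acquired output buffers are tracked in *hdrPlusRequest and
 // released in onCaptureResult()/onFailedCaptureResult(); on any failure below
 // abortPendingHdrplusRequest() returns them immediately.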
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014992 status_t res = OK;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080014993 pbcamera::CaptureRequest pbRequest;
14994 pbRequest.id = request.frame_number;
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070014995 // Iterate through all requested output buffers and add them to an HDR+ request.
14996 for (uint32_t i = 0; i < request.num_output_buffers; i++) {
14997 // Find the index of the stream in mStreamInfo.
14998 uint32_t pbStreamId = 0;
14999 bool found = false;
15000 for (auto streamInfo : mStreamInfo) {
15001 if (streamInfo->stream == request.output_buffers[i].stream) {
15002 pbStreamId = streamInfo->id;
15003 found = true;
15004 break;
15005 }
15006 }
15007
15008 if (!found) {
15009 ALOGE("%s: requested stream was not configured.", __FUNCTION__);
15010 abortPendingHdrplusRequest(hdrPlusRequest);
15011 return false;
15012 }
15013 auto outBuffer = std::make_shared<mm_camera_buf_def_t>();
15014 switch (request.output_buffers[i].stream->format) {
15015 case HAL_PIXEL_FORMAT_BLOB:
15016 {
15017 // For jpeg output, get a YUV buffer from pic channel.
15018 QCamera3PicChannel *picChannel =
15019 (QCamera3PicChannel*)request.output_buffers[i].stream->priv;
15020 res = picChannel->getYuvBufferForRequest(outBuffer.get(), request.frame_number);
15021 if (res != OK) {
15022 ALOGE("%s: Getting an available YUV buffer from pic channel failed: %s (%d)",
15023 __FUNCTION__, strerror(-res), res);
15024 abortPendingHdrplusRequest(hdrPlusRequest);
15025 return false;
15026 }
15027 break;
15028 }
15029 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15030 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15031 {
15032 // For YUV output, register the buffer and get the buffer def from the channel.
15033 QCamera3ProcessingChannel *channel =
15034 (QCamera3ProcessingChannel*)request.output_buffers[i].stream->priv;
15035 res = channel->registerBufferAndGetBufDef(request.output_buffers[i].buffer,
15036 outBuffer.get());
15037 if (res != OK) {
15038 ALOGE("%s: Getting the buffer def failed: %s (%d)", __FUNCTION__,
15039 strerror(-res), res);
15040 abortPendingHdrplusRequest(hdrPlusRequest);
15041 return false;
15042 }
15043 break;
15044 }
15045 default:
15046 abortPendingHdrplusRequest(hdrPlusRequest);
15047 return false;
15048 }
15049
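 // Describe the output buffer to the HDR+ service: dma-buf backed buffers
 // are passed by fd, heap-backed buffers (fd == -1) by their CPU pointer.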
15050 pbcamera::StreamBuffer buffer;
15051 buffer.streamId = pbStreamId;
15052 buffer.dmaBufFd = outBuffer->fd;
15053 buffer.data = outBuffer->fd == -1 ? outBuffer->buffer : nullptr;
15054 buffer.dataSize = outBuffer->frame_len;
15055
15056 pbRequest.outputBuffers.push_back(buffer);
15057
15058 hdrPlusRequest->outputBuffers.emplace(pbStreamId, outBuffer);
15059 hdrPlusRequest->frameworkOutputBuffers.emplace(pbStreamId, request.output_buffers[i]);
15060 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015061
15062 // Submit an HDR+ capture request to HDR+ service.
Chien-Yu Chen17cec362017-07-05 17:10:31 -070015063 res = gHdrPlusClient->submitCaptureRequest(&pbRequest, metadata);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015064 if (res != OK) {
15065 ALOGE("%s: %d: Submitting a capture request failed: %s (%d)", __FUNCTION__, __LINE__,
15066 strerror(-res), res);
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015067 abortPendingHdrplusRequest(hdrPlusRequest);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015068 return false;
15069 }
15070
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015071 return true;
15072}
15073
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015074status_t QCamera3HardwareInterface::openHdrPlusClientAsyncLocked()
15075{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015076 if (gHdrPlusClientOpening || gHdrPlusClient != nullptr) {
15077 return OK;
15078 }
15079
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015080 status_t res = gEaselManagerClient->openHdrPlusClientAsync(this);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015081 if (res != OK) {
15082 ALOGE("%s: Opening HDR+ client asynchronously failed: %s (%d)", __FUNCTION__,
15083 strerror(-res), res);
15084 return res;
15085 }
15086 gHdrPlusClientOpening = true;
15087
15088 return OK;
15089}
15090
Chien-Yu Chenee335912017-02-09 17:53:20 -080015091status_t QCamera3HardwareInterface::enableHdrPlusModeLocked()
15092{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015093 status_t res;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015094
Chien-Yu Chena6c99062017-05-23 13:45:06 -070015095 if (mHdrPlusModeEnabled) {
15096 return OK;
15097 }
15098
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015099 // Check if gHdrPlusClient is opened or being opened.
15100 if (gHdrPlusClient == nullptr) {
15101 if (gHdrPlusClientOpening) {
15102 // HDR+ client is being opened. HDR+ mode will be enabled when it's opened.
15103 return OK;
15104 }
15105
15106 res = openHdrPlusClientAsyncLocked();
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015107 if (res != OK) {
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015108 ALOGE("%s: Failed to open HDR+ client asynchronously: %s (%d).", __FUNCTION__,
15109 strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015110 return res;
15111 }
15112
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015113 // When opening HDR+ client completes, HDR+ mode will be enabled.
15114 return OK;
15115
Chien-Yu Chenee335912017-02-09 17:53:20 -080015116 }
15117
15118 // Configure stream for HDR+.
15119 res = configureHdrPlusStreamsLocked();
15120 if (res != OK) {
15121 LOGE("%s: Failed to configure HDR+ streams: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015122 return res;
15123 }
15124
15125 // Enable HDR+ mode so Easel will start capturing ZSL raw buffers.
15126 res = gHdrPlusClient->setZslHdrPlusMode(true);
15127 if (res != OK) {
15128 LOGE("%s: Failed to enable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
Chien-Yu Chenee335912017-02-09 17:53:20 -080015129 return res;
15130 }
15131
15132 mHdrPlusModeEnabled = true;
15133 ALOGD("%s: HDR+ mode enabled", __FUNCTION__);
15134
15135 return OK;
15136}
15137
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015138void QCamera3HardwareInterface::finishHdrPlusClientOpeningLocked(std::unique_lock<std::mutex> &lock)
15139{
15140 if (gHdrPlusClientOpening) {
15141 gHdrPlusClientOpenCond.wait(lock, [&] { return !gHdrPlusClientOpening; });
15142 }
15143}
15144
Chien-Yu Chenee335912017-02-09 17:53:20 -080015145void QCamera3HardwareInterface::disableHdrPlusModeLocked()
15146{
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015147 // Disable HDR+ mode.
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015148 if (gHdrPlusClient != nullptr && mHdrPlusModeEnabled) {
Chien-Yu Chena7fe4ed2017-03-13 16:36:57 -070015149 status_t res = gHdrPlusClient->setZslHdrPlusMode(false);
15150 if (res != OK) {
15151 ALOGE("%s: Failed to disable HDR+ mode: %s (%d)", __FUNCTION__, strerror(-res), res);
15152 }
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015153
15154 // Close HDR+ client so Easel can enter low power mode.
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015155 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen3b17c672017-04-24 12:49:52 -070015156 gHdrPlusClient = nullptr;
Chien-Yu Chenee335912017-02-09 17:53:20 -080015157 }
15158
15159 mHdrPlusModeEnabled = false;
15160 ALOGD("%s: HDR+ mode disabled", __FUNCTION__);
15161}
15162
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015163bool QCamera3HardwareInterface::isSessionHdrPlusModeCompatible()
15164{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015165 // Check that at least one YUV, implementation-defined, or JPEG (BLOB) output is configured.
15166 // TODO: Support RAW (b/36690506)
15167 for (auto streamInfo : mStreamInfo) {
15168 if (streamInfo != nullptr && streamInfo->stream != nullptr) {
15169 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT &&
15170 (streamInfo->stream->format == HAL_PIXEL_FORMAT_BLOB ||
15171 streamInfo->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
15172 streamInfo->stream->format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED)) {
15173 return true;
15174 }
15175 }
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015176 }
15177
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015178 return false;
Chien-Yu Chendeaebad2017-06-30 11:46:34 -070015179}
15180
Chien-Yu Chenee335912017-02-09 17:53:20 -080015181status_t QCamera3HardwareInterface::configureHdrPlusStreamsLocked()
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015182{
15183 pbcamera::InputConfiguration inputConfig;
15184 std::vector<pbcamera::StreamConfiguration> outputStreamConfigs;
15185 status_t res = OK;
15186
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015187 // Sensor MIPI will send data to Easel.
15188 inputConfig.isSensorInput = true;
15189 inputConfig.sensorMode.cameraId = mCameraId;
15190 inputConfig.sensorMode.pixelArrayWidth = mSensorModeInfo.pixel_array_size.width;
15191 inputConfig.sensorMode.pixelArrayHeight = mSensorModeInfo.pixel_array_size.height;
15192 inputConfig.sensorMode.activeArrayWidth = mSensorModeInfo.active_array_size.width;
15193 inputConfig.sensorMode.activeArrayHeight = mSensorModeInfo.active_array_size.height;
15194 inputConfig.sensorMode.outputPixelClkHz = mSensorModeInfo.op_pixel_clk;
15195 inputConfig.sensorMode.timestampOffsetNs = mSensorModeInfo.timestamp_offset;
Chien-Yu Chenc8b6ad02017-09-15 13:50:26 -070015196 inputConfig.sensorMode.timestampCropOffsetNs = mSensorModeInfo.timestamp_crop_offset;
15197
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015198 if (mSensorModeInfo.num_raw_bits != 10) {
15199 ALOGE("%s: Only RAW10 is supported but this sensor mode has %d raw bits.", __FUNCTION__,
15200 mSensorModeInfo.num_raw_bits);
15201 return BAD_VALUE;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015202 }
15203
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015204 inputConfig.sensorMode.format = HAL_PIXEL_FORMAT_RAW10;
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015205
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015206 // Iterate through configured output streams in HAL and configure those streams in HDR+
15207 // service.
15208 for (auto streamInfo : mStreamInfo) {
15209 pbcamera::StreamConfiguration outputConfig;
15210 if (streamInfo->stream->stream_type == CAMERA3_STREAM_OUTPUT) {
15211 switch (streamInfo->stream->format) {
15212 case HAL_PIXEL_FORMAT_BLOB:
15213 case HAL_PIXEL_FORMAT_YCbCr_420_888:
15214 case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED:
15215 res = fillPbStreamConfig(&outputConfig, streamInfo->id,
15216 streamInfo->channel, /*stream index*/0);
15217 if (res != OK) {
15218 LOGE("%s: Failed to get fill stream config for YUV stream: %s (%d)",
15219 __FUNCTION__, strerror(-res), res);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015220
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015221 return res;
15222 }
15223
15224 outputStreamConfigs.push_back(outputConfig);
15225 break;
15226 default:
15227 // TODO: handle RAW16 outputs if mRawChannel was created. (b/36690506)
15228 break;
15229 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015230 }
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015231 }
15232
Chien-Yu Chen27ec9622017-02-23 13:39:41 -080015233 res = gHdrPlusClient->configureStreams(inputConfig, outputStreamConfigs);
Chien-Yu Chen200b2ce2017-02-01 15:03:02 -080015234 if (res != OK) {
15235 LOGE("%d: Failed to configure streams with HDR+ client: %s (%d)", __FUNCTION__,
15236 strerror(-res), res);
15237 return res;
15238 }
15239
15240 return OK;
15241}
15242
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015243void QCamera3HardwareInterface::handleEaselFatalError()
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015244{
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015245 pthread_mutex_lock(&mMutex);
15246 mState = ERROR;
15247 pthread_mutex_unlock(&mMutex);
15248
Chien-Yu Chen09cb28e2017-07-21 13:15:52 -070015249 handleCameraDeviceError(/*stopChannelImmediately*/true);
Chien-Yu Chen90f1fc12017-07-14 14:31:53 -070015250}
15251
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015252void QCamera3HardwareInterface::handleEaselFatalErrorAsync()
15253{
15254 if (mEaselErrorFuture.valid()) {
 15255 // A fatal error is already being handled.
15256 return;
15257 }
15258
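 // Handle the error asynchronously so the caller (typically an Easel/HDR+
 // callback thread) is not blocked while the device error is reported.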
15259 // Launch a future to handle the fatal error.
15260 mEaselErrorFuture = std::async(std::launch::async,
15261 &QCamera3HardwareInterface::handleEaselFatalError, this);
15262}
15263
15264void QCamera3HardwareInterface::onEaselFatalError(std::string errMsg)
15265{
15266 ALOGE("%s: Got an Easel fatal error: %s", __FUNCTION__, errMsg.c_str());
15267 handleEaselFatalErrorAsync();
15268}
15269
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015270void QCamera3HardwareInterface::onOpened(std::unique_ptr<HdrPlusClient> client)
15271{
Arnd Geis8cbfc182017-09-07 14:46:41 -070015272 int rc = NO_ERROR;
15273
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015274 if (client == nullptr) {
15275 ALOGE("%s: Opened client is null.", __FUNCTION__);
15276 return;
15277 }
15278
Chien-Yu Chene96475e2017-04-11 11:53:26 -070015279 logEaselEvent("EASEL_STARTUP_LATENCY", "HDR+ client opened.");
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015280 ALOGI("%s: HDR+ client opened.", __FUNCTION__);
15281
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015282 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015283 if (!gHdrPlusClientOpening) {
15284 ALOGW("%s: HDR+ is disabled while HDR+ client is being opened.", __FUNCTION__);
15285 return;
15286 }
15287
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015288 gHdrPlusClient = std::move(client);
15289 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015290 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015291
15292 // Set static metadata.
15293 status_t res = gHdrPlusClient->setStaticMetadata(*gStaticMetadata[mCameraId]);
15294 if (res != OK) {
15295 LOGE("%s: Failed to set static metadata in HDR+ client: %s (%d). Closing HDR+ client.",
15296 __FUNCTION__, strerror(-res), res);
Chien-Yu Chend77a5462017-06-02 18:00:38 -070015297 gEaselManagerClient->closeHdrPlusClient(std::move(gHdrPlusClient));
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015298 gHdrPlusClient = nullptr;
15299 return;
15300 }
15301
15302 // Enable HDR+ mode.
15303 res = enableHdrPlusModeLocked();
15304 if (res != OK) {
15305 LOGE("%s: Failed to configure HDR+ streams.", __FUNCTION__);
15306 }
Arnd Geis8cbfc182017-09-07 14:46:41 -070015307
15308 // Get Easel firmware version
15309 if (EaselManagerClientOpened) {
15310 rc = gEaselManagerClient->getFwVersion(mEaselFwVersion);
15311 if (rc != OK) {
15312 ALOGD("%s: Failed to query Easel firmware version", __FUNCTION__);
15313 } else {
15314 mEaselFwUpdated = true;
15315 }
15316 }
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015317}
15318
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015319void QCamera3HardwareInterface::onOpenFailed(status_t err)
15320{
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015321 ALOGE("%s: Opening HDR+ client failed: %s (%d)", __FUNCTION__, strerror(-err), err);
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015322 std::unique_lock<std::mutex> l(gHdrPlusClientLock);
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015323 gHdrPlusClientOpening = false;
Chien-Yu Chen77ccd022017-06-23 12:00:36 -070015324 gHdrPlusClientOpenCond.notify_one();
Chien-Yu Chen54c19cb2017-03-31 17:53:18 -070015325}
15326
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015327void QCamera3HardwareInterface::onFatalError()
15328{
Chien-Yu Chene80574b2017-09-08 19:05:20 -070015329 ALOGE("%s: HDR+ client encountered a fatal error.", __FUNCTION__);
15330 handleEaselFatalErrorAsync();
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015331}
15332
Chien-Yu Chen29fd1d72017-04-27 18:42:09 -070015333void QCamera3HardwareInterface::onShutter(uint32_t requestId, int64_t apSensorTimestampNs)
15334{
15335 ALOGV("%s: %d: Received a shutter for HDR+ request %d timestamp %" PRId64, __FUNCTION__,
15336 __LINE__, requestId, apSensorTimestampNs);
15337
15338 mShutterDispatcher.markShutterReady(requestId, apSensorTimestampNs);
15339}
15340
Chien-Yu Chendaf68892017-08-25 12:56:40 -070015341void QCamera3HardwareInterface::onNextCaptureReady(uint32_t requestId)
15342{
15343 pthread_mutex_lock(&mMutex);
15344
15345 // Find the pending request for this result metadata.
15346 auto requestIter = mPendingRequestsList.begin();
15347 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15348 requestIter++;
15349 }
15350
15351 if (requestIter == mPendingRequestsList.end()) {
15352 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15353 pthread_mutex_unlock(&mMutex);
15354 return;
15355 }
15356
15357 requestIter->partial_result_cnt++;
15358
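 // Report "next still-intent request ready" to the framework as a partial
 // result that carries only the vendor tag and no output buffers.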
15359 CameraMetadata metadata;
15360 uint8_t ready = true;
15361 metadata.update(NEXUS_EXPERIMENTAL_2017_NEXT_STILL_INTENT_REQUEST_READY, &ready, 1);
15362
15363 // Send it to framework.
15364 camera3_capture_result_t result = {};
15365
15366 result.result = metadata.getAndLock();
15367 // Populate metadata result
15368 result.frame_number = requestId;
15369 result.num_output_buffers = 0;
15370 result.output_buffers = NULL;
15371 result.partial_result = requestIter->partial_result_cnt;
15372
15373 orchestrateResult(&result);
15374 metadata.unlock(result.result);
15375
15376 pthread_mutex_unlock(&mMutex);
15377}
15378
Chien-Yu Chen0a921f92017-08-27 17:25:33 -070015379void QCamera3HardwareInterface::onPostview(uint32_t requestId,
15380 std::unique_ptr<std::vector<uint8_t>> postview, uint32_t width, uint32_t height,
15381 uint32_t stride, int32_t format)
15382{
15383 if (property_get_bool("persist.camera.hdrplus.dump_postview", false)) {
15384 ALOGI("%s: %d: Received a postview %dx%d for HDR+ request %d", __FUNCTION__,
15385 __LINE__, width, height, requestId);
15386 char buf[FILENAME_MAX] = {};
15387 snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"postview_%d_%dx%d.ppm",
15388 requestId, width, height);
15389
15390 pbcamera::StreamConfiguration config = {};
15391 config.image.width = width;
15392 config.image.height = height;
15393 config.image.format = format;
15394
15395 pbcamera::PlaneConfiguration plane = {};
15396 plane.stride = stride;
15397 plane.scanline = height;
15398
15399 config.image.planes.push_back(plane);
15400
15401 pbcamera::StreamBuffer buffer = {};
15402 buffer.streamId = 0;
15403 buffer.dmaBufFd = -1;
15404 buffer.data = postview->data();
15405 buffer.dataSize = postview->size();
15406
15407 hdrplus_client_utils::writePpm(buf, config, buffer);
15408 }
15409
15410 pthread_mutex_lock(&mMutex);
15411
15412 // Find the pending request for this result metadata.
15413 auto requestIter = mPendingRequestsList.begin();
15414 while (requestIter != mPendingRequestsList.end() && requestIter->frame_number != requestId) {
15415 requestIter++;
15416 }
15417
15418 if (requestIter == mPendingRequestsList.end()) {
15419 ALOGE("%s: Cannot find a pending request for frame number %u.", __FUNCTION__, requestId);
15420 pthread_mutex_unlock(&mMutex);
15421 return;
15422 }
15423
15424 requestIter->partial_result_cnt++;
15425
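 // Deliver the postview image and its layout to the framework as a partial
 // result carrying only the postview vendor tags.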
15426 CameraMetadata metadata;
15427 int32_t config[3] = {static_cast<int32_t>(width), static_cast<int32_t>(height),
15428 static_cast<int32_t>(stride)};
15429 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_CONFIG, config, 3);
15430 metadata.update(NEXUS_EXPERIMENTAL_2017_POSTVIEW_DATA, postview->data(), postview->size());
15431
15432 // Send it to framework.
15433 camera3_capture_result_t result = {};
15434
15435 result.result = metadata.getAndLock();
15436 // Populate metadata result
15437 result.frame_number = requestId;
15438 result.num_output_buffers = 0;
15439 result.output_buffers = NULL;
15440 result.partial_result = requestIter->partial_result_cnt;
15441
15442 orchestrateResult(&result);
15443 metadata.unlock(result.result);
15444
15445 pthread_mutex_unlock(&mMutex);
15446}
15447
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015448void QCamera3HardwareInterface::onCaptureResult(pbcamera::CaptureResult *result,
Chien-Yu Chen411e9c52017-05-08 11:58:50 -070015449 const camera_metadata_t &resultMetadata)
15450{
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015451 if (result == nullptr) {
15452 ALOGE("%s: result is nullptr.", __FUNCTION__);
15453 return;
15454 }
15455
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015456 // Find the pending HDR+ request.
15457 HdrPlusPendingRequest pendingRequest;
15458 {
15459 Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
 15460 auto req = mHdrPlusPendingRequests.find(result->requestId);
 if (req == mHdrPlusPendingRequests.end()) {
 ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, result->requestId);
 return;
 }
 15461 pendingRequest = req->second;
15462 }
15463
15464 // Update the result metadata with the settings of the HDR+ still capture request because
15465 // the result metadata belongs to a ZSL buffer.
15466 CameraMetadata metadata;
15467 metadata = &resultMetadata;
15468 updateHdrPlusResultMetadata(metadata, pendingRequest.settings);
15469 camera_metadata_t* updatedResultMetadata = metadata.release();
15470
15471 uint32_t halSnapshotStreamId = 0;
15472 if (mPictureChannel != nullptr) {
15473 halSnapshotStreamId = mPictureChannel->getStreamID(mPictureChannel->getStreamTypeMask());
15474 }
15475
15476 auto halMetadata = std::make_shared<metadata_buffer_t>();
15477 clear_metadata_buffer(halMetadata.get());
15478
15479 // Convert updated result metadata to HAL metadata.
15480 status_t res = translateFwkMetadataToHalMetadata(updatedResultMetadata, halMetadata.get(),
15481 halSnapshotStreamId, /*minFrameDuration*/0);
15482 if (res != 0) {
15483 ALOGE("%s: Translating metadata failed: %s (%d)", __FUNCTION__, strerror(-res), res);
15484 }
15485
15486 for (auto &outputBuffer : result->outputBuffers) {
15487 uint32_t streamId = outputBuffer.streamId;
15488
15489 // Find the framework output buffer in the pending request.
15490 auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
15491 if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
15492 ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
15493 streamId);
15494 continue;
Chien-Yu Chene687bd02016-12-07 18:30:26 -080015495 }
15496
Chien-Yu Chen14d3e392017-07-10 18:27:05 -070015497 camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;
15498
15499 // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        // Check whether to dump the buffer.
        if (frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_YCbCr_420_888 ||
                frameworkOutputBuffer->stream->format == HAL_PIXEL_FORMAT_BLOB) {
            // If the stream format is YUV or JPEG, check if dumping HDR+ YUV output is enabled.
            char prop[PROPERTY_VALUE_MAX];
            property_get("persist.camera.hdrplus.dump_yuv", prop, "0");
            bool dumpYuvOutput = atoi(prop);

            if (dumpYuvOutput) {
                // Dump the YUV buffer to a PPM file.
                pbcamera::StreamConfiguration outputConfig;
                status_t rc = fillPbStreamConfig(&outputConfig, streamId,
                        channel, /*stream index*/0);
                if (rc == OK) {
                    char buf[FILENAME_MAX] = {};
                    snprintf(buf, sizeof(buf), QCAMERA_DUMP_FRM_LOCATION"s_%d_%d_%dx%d.ppm",
                            result->requestId, streamId,
                            outputConfig.image.width, outputConfig.image.height);

                    hdrplus_client_utils::writePpm(buf, outputConfig, outputBuffer);
                } else {
                    LOGW("%s: Couldn't dump YUV buffer because getting stream config failed: "
                            "%s (%d).", __FUNCTION__, strerror(-rc), rc);
                }
            }
        }

        if (channel == mPictureChannel) {
            // Return the buffer to the picture channel for encoding.
            mPictureChannel->returnYuvBufferAndEncode(outputBufferDef.get(),
                    frameworkOutputBuffer->buffer, result->requestId,
                    halMetadata);
        } else {
            // Return the buffer to the camera framework.
            pthread_mutex_lock(&mMutex);
            handleBufferWithLock(frameworkOutputBuffer, result->requestId);
            channel->unregisterBuffer(outputBufferDef.get());
            pthread_mutex_unlock(&mMutex);
        }
    }

    // Send HDR+ metadata to the framework.
    {
        pthread_mutex_lock(&mMutex);

        // updatedResultMetadata will be freed in handlePendingResultMetadataWithLock.
        handlePendingResultMetadataWithLock(result->requestId, updatedResultMetadata);
        pthread_mutex_unlock(&mMutex);
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(result->requestId);
        mHdrPlusPendingRequests.erase(req);
    }
}

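// Handles a failed HDR+ capture result: returns or unregisters the internal
// output buffers tied to the request, drops the pending HDR+ request, and
// reports buffer errors for the frame back to the camera framework.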
void QCamera3HardwareInterface::onFailedCaptureResult(pbcamera::CaptureResult *failedResult)
{
    if (failedResult == nullptr) {
        ALOGE("%s: Got an empty failed result.", __FUNCTION__);
        return;
    }

    ALOGE("%s: Got a failed HDR+ result for request %d", __FUNCTION__, failedResult->requestId);

    // Find the pending HDR+ request.
    HdrPlusPendingRequest pendingRequest;
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        if (req == mHdrPlusPendingRequests.end()) {
            ALOGE("%s: Couldn't find pending request %d", __FUNCTION__, failedResult->requestId);
            return;
        }
        pendingRequest = req->second;
    }

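    // Return the internal YUV buffers allocated for this request: picture channel
    // buffers go back to the picture channel, others are unregistered from their channel.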
    for (auto &outputBuffer : failedResult->outputBuffers) {
        uint32_t streamId = outputBuffer.streamId;

        // Find the framework output buffer in the pending request.
        auto frameworkOutputBufferIter = pendingRequest.frameworkOutputBuffers.find(streamId);
        if (frameworkOutputBufferIter == pendingRequest.frameworkOutputBuffers.end()) {
            ALOGE("%s: Couldn't find framework output buffers for stream id %u", __FUNCTION__,
                    streamId);
            continue;
        }

        camera3_stream_buffer_t *frameworkOutputBuffer = &frameworkOutputBufferIter->second;

        // Find the channel for the output buffer.
        QCamera3ProcessingChannel *channel =
                (QCamera3ProcessingChannel*)frameworkOutputBuffer->stream->priv;

        // Find the output buffer def.
        auto outputBufferIter = pendingRequest.outputBuffers.find(streamId);
        if (outputBufferIter == pendingRequest.outputBuffers.end()) {
            ALOGE("%s: Cannot find output buffer", __FUNCTION__);
            continue;
        }

        std::shared_ptr<mm_camera_buf_def_t> outputBufferDef = outputBufferIter->second;

        if (channel == mPictureChannel) {
            // Return the buffer to the picture channel.
            mPictureChannel->returnYuvBuffer(outputBufferDef.get());
        } else {
            channel->unregisterBuffer(outputBufferDef.get());
        }
    }

    // Remove the HDR+ pending request.
    {
        Mutex::Autolock lock(mHdrPlusPendingRequestsLock);
        auto req = mHdrPlusPendingRequests.find(failedResult->requestId);
        mHdrPlusPendingRequests.erase(req);
    }

    pthread_mutex_lock(&mMutex);

    // Find the pending buffers for this request.
    auto pendingBuffers = mPendingBuffersMap.mPendingBuffersInRequest.begin();
    while (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        if (pendingBuffers->frame_number == failedResult->requestId) {
            break;
        }
        pendingBuffers++;
    }

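    // The HDR+ request failed, so the framework's buffers for this frame will not be
    // filled; report them back as errors so the framework can reclaim them.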
    // Send out buffer errors for the pending buffers.
    if (pendingBuffers != mPendingBuffersMap.mPendingBuffersInRequest.end()) {
        std::vector<camera3_stream_buffer_t> streamBuffers;
        for (auto &buffer : pendingBuffers->mPendingBufferList) {
            // Prepare a stream buffer.
            camera3_stream_buffer_t streamBuffer = {};
            streamBuffer.stream = buffer.stream;
            streamBuffer.buffer = buffer.buffer;
            streamBuffer.status = CAMERA3_BUFFER_STATUS_ERROR;
            streamBuffer.acquire_fence = -1;
            streamBuffer.release_fence = -1;

            streamBuffers.push_back(streamBuffer);

            // Send out error buffer event.
            camera3_notify_msg_t notify_msg = {};
            notify_msg.type = CAMERA3_MSG_ERROR;
            notify_msg.message.error.frame_number = pendingBuffers->frame_number;
            notify_msg.message.error.error_code = CAMERA3_MSG_ERROR_BUFFER;
            notify_msg.message.error.error_stream = buffer.stream;

            orchestrateNotify(&notify_msg);
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffers->frame_number;
        result.num_output_buffers = streamBuffers.size();
        result.output_buffers = &streamBuffers[0];

        // Send out result with buffer errors.
        orchestrateResult(&result);

        // Remove pending buffers.
        mPendingBuffersMap.mPendingBuffersInRequest.erase(pendingBuffers);
    }

    // Remove pending request.
    auto halRequest = mPendingRequestsList.begin();
    while (halRequest != mPendingRequestsList.end()) {
        if (halRequest->frame_number == failedResult->requestId) {
            mPendingRequestsList.erase(halRequest);
            break;
        }
        halRequest++;
    }

    pthread_mutex_unlock(&mMutex);
}

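// ShutterDispatcher delivers shutter notifications to the framework in
// frame-number order. expectShutter() registers a frame, markShutterReady()
// supplies its timestamp, and a shutter is sent only after all earlier
// shutters in the same queue have been sent. Regular and reprocess shutters
// are tracked in separate queues.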
ShutterDispatcher::ShutterDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

void ShutterDispatcher::expectShutter(uint32_t frameNumber, bool isReprocess)
{
    std::lock_guard<std::mutex> lock(mLock);

    if (isReprocess) {
        mReprocessShutters.emplace(frameNumber, Shutter());
    } else {
        mShutters.emplace(frameNumber, Shutter());
    }
}

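// Marks the shutter for a frame as ready with the given timestamp, then sends
// out, in frame-number order, every shutter at the front of its queue that is
// ready. A frame number not found in either queue means the shutter has
// already been sent, and the call is a no-op.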
void ShutterDispatcher::markShutterReady(uint32_t frameNumber, uint64_t timestamp)
{
    std::lock_guard<std::mutex> lock(mLock);

    std::map<uint32_t, Shutter> *shutters = nullptr;

    // Find the shutter entry.
    auto shutter = mShutters.find(frameNumber);
    if (shutter == mShutters.end()) {
        shutter = mReprocessShutters.find(frameNumber);
        if (shutter == mReprocessShutters.end()) {
            // Shutter was already sent.
            return;
        }
        shutters = &mReprocessShutters;
    } else {
        shutters = &mShutters;
    }

    // Mark this frame's shutter ready.
    shutter->second.ready = true;
    shutter->second.timestamp = timestamp;

    // Iterate through the shutters and send them out until reaching one that's not ready yet.
    shutter = shutters->begin();
    while (shutter != shutters->end()) {
        if (!shutter->second.ready) {
            // If this shutter is not ready, the following shutters can't be sent.
            break;
        }

        camera3_notify_msg_t msg = {};
        msg.type = CAMERA3_MSG_SHUTTER;
        msg.message.shutter.frame_number = shutter->first;
        msg.message.shutter.timestamp = shutter->second.timestamp;
        mParent->orchestrateNotify(&msg);

        shutter = shutters->erase(shutter);
    }
}

void ShutterDispatcher::clear(uint32_t frameNumber)
{
    std::lock_guard<std::mutex> lock(mLock);
    mShutters.erase(frameNumber);
    mReprocessShutters.erase(frameNumber);
}

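// Clears all pending shutters. Shutters that were registered but never
// dispatched are logged as errors before being discarded.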
void ShutterDispatcher::clear()
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale shutters.
    for (auto &shutter : mShutters) {
        ALOGE("%s: stale shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    // Log errors for stale reprocess shutters.
    for (auto &shutter : mReprocessShutters) {
        ALOGE("%s: stale reprocess shutter: frame number %u, ready %d, timestamp %" PRId64,
                __FUNCTION__, shutter.first, shutter.second.ready,
                shutter.second.timestamp);
    }

    mShutters.clear();
    mReprocessShutters.clear();
}

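// OutputBufferDispatcher returns output buffers to the framework in
// frame-number order, per stream. configureStreams() creates a
// "frame-number -> buffer" map for each configured stream, expectBuffer()
// registers an unready buffer for a frame, and markBufferReady() sends out
// ready buffers until it reaches a frame whose buffer is not ready yet.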
OutputBufferDispatcher::OutputBufferDispatcher(QCamera3HardwareInterface *parent) :
        mParent(parent) {}

status_t OutputBufferDispatcher::configureStreams(camera3_stream_configuration_t *streamList)
{
    std::lock_guard<std::mutex> lock(mLock);
    mStreamBuffers.clear();
    if (!streamList) {
        ALOGE("%s: streamList is nullptr.", __FUNCTION__);
        return -EINVAL;
    }

    // Create a "frame-number -> buffer" map for each stream.
    for (uint32_t i = 0; i < streamList->num_streams; i++) {
        mStreamBuffers.emplace(streamList->streams[i], std::map<uint32_t, Buffer>());
    }

    return OK;
}

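// Registers an unready buffer for the given frame number on a configured
// stream. Returns -EINVAL if the stream was not part of the current stream
// configuration.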
status_t OutputBufferDispatcher::expectBuffer(uint32_t frameNumber, camera3_stream_t *stream)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Stream %p was not configured.", __FUNCTION__, stream);
        return -EINVAL;
    }

    // Create an unready buffer for this frame number.
    buffers->second.emplace(frameNumber, Buffer());
    return OK;
}

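// Marks the buffer for the given frame number as ready on its stream, then
// sends out, in frame-number order, each ready buffer at the front of that
// stream's queue as a capture result carrying a single output buffer.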
void OutputBufferDispatcher::markBufferReady(uint32_t frameNumber,
        const camera3_stream_buffer_t &buffer)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Find the "frame-number -> buffer" map for the stream.
    auto buffers = mStreamBuffers.find(buffer.stream);
    if (buffers == mStreamBuffers.end()) {
        ALOGE("%s: Cannot find pending buffers for stream %p.", __FUNCTION__, buffer.stream);
        return;
    }

    // Find the unready buffer for this frame number and mark it ready.
    auto pendingBuffer = buffers->second.find(frameNumber);
    if (pendingBuffer == buffers->second.end()) {
        ALOGE("%s: Cannot find the pending buffer for frame number %u.", __FUNCTION__, frameNumber);
        return;
    }

    pendingBuffer->second.ready = true;
    pendingBuffer->second.buffer = buffer;

    // Iterate through the buffers and send them out until reaching one that's not ready yet.
    pendingBuffer = buffers->second.begin();
    while (pendingBuffer != buffers->second.end()) {
        if (!pendingBuffer->second.ready) {
            // If this buffer is not ready, the following buffers can't be sent.
            break;
        }

        camera3_capture_result_t result = {};
        result.frame_number = pendingBuffer->first;
        result.num_output_buffers = 1;
        result.output_buffers = &pendingBuffer->second.buffer;

        // Send out the result containing the ready output buffer.
        mParent->orchestrateResult(&result);

        pendingBuffer = buffers->second.erase(pendingBuffer);
    }
}

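// Clears all pending buffers, logging any that were registered but never
// dispatched. If clearConfiguredStreams is true, the per-stream maps created
// by configureStreams() are dropped as well.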
void OutputBufferDispatcher::clear(bool clearConfiguredStreams)
{
    std::lock_guard<std::mutex> lock(mLock);

    // Log errors for stale buffers.
    for (auto &buffers : mStreamBuffers) {
        for (auto &buffer : buffers.second) {
            ALOGE("%s: stale buffer: stream %p, frame number %u, ready %d",
                    __FUNCTION__, buffers.first, buffer.first, buffer.second.ready);
        }
        buffers.second.clear();
    }

    if (clearConfiguredStreams) {
        mStreamBuffers.clear();
    }
}

}; //end namespace qcamera