merge in jb-mr2-release history after reset to jb-mr2-dev
diff --git a/Android.mk b/Android.mk
index 282b1fa..2a8ef42 100644
--- a/Android.mk
+++ b/Android.mk
@@ -37,5 +37,4 @@
endif
-include $(call all-makefiles-under,$(LOCAL_PATH))
-
+include $(call first-makefiles-under,$(LOCAL_PATH))
diff --git a/BoardConfig.mk b/BoardConfig.mk
index fae7702..5066aba 100644
--- a/BoardConfig.mk
+++ b/BoardConfig.mk
@@ -94,4 +94,6 @@
BOARD_CHARGER_ENABLE_SUSPEND := true
+USE_DEVICE_SPECIFIC_CAMERA := true
+
-include vendor/lge/mako/BoardConfigVendor.mk
diff --git a/camera/Android.mk b/camera/Android.mk
new file mode 100755
index 0000000..3ce0d1a
--- /dev/null
+++ b/camera/Android.mk
@@ -0,0 +1,135 @@
+LOCAL_PATH:= $(call my-dir)
+
+ifeq ($(TARGET_BOARD_PLATFORM),msm8960)
+ifneq ($(USE_CAMERA_STUB),true)
+ifeq ($(USE_DEVICE_SPECIFIC_CAMERA),true)
+
+ # When zero we link against libmmcamera; when 1, we dlopen libmmcamera.
+ DLOPEN_LIBMMCAMERA:=1
+ ifneq ($(BUILD_TINY_ANDROID),true)
+ V4L2_BASED_LIBCAM := true
+
+ LOCAL_PATH1:= $(call my-dir)
+
+ include $(CLEAR_VARS)
+
+ LOCAL_CFLAGS:= -DDLOPEN_LIBMMCAMERA=$(DLOPEN_LIBMMCAMERA)
+
+ #define BUILD_UNIFIED_CODE
+ ifeq ($(TARGET_BOARD_PLATFORM),msm7627a)
+ BUILD_UNIFIED_CODE := true
+ else
+ BUILD_UNIFIED_CODE := false
+ endif
+
+ ifeq ($(TARGET_BOARD_PLATFORM),msm7627a)
+ LOCAL_CFLAGS+= -DVFE_7X27A
+ endif
+
+ ifeq ($(strip $(TARGET_USES_ION)),true)
+ LOCAL_CFLAGS += -DUSE_ION
+ endif
+
+ LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_CP_MM_HEAP_ID # 8660=SMI, Rest=EBI
+ LOCAL_CFLAGS += -DCAMERA_ZSL_ION_HEAP_ID=ION_CP_MM_HEAP_ID
+ ifeq ($(TARGET_BOARD_PLATFORM),msm8960)
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_MM_HEAP
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_ZSL_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ else ifeq ($(TARGET_BOARD_PLATFORM),msm8660)
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_CAMERA_HEAP
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_CAMERA_HEAP # Don't Care
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID # EBI
+ LOCAL_CFLAGS += -DCAMERA_ZSL_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+ else
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_ADSP_HEAP
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_ADSP_HEAP # Don't Care
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=GRALLOC_USAGE_PRIVATE_UNCACHED #uncached
+ endif
+
+ ifeq ($(V4L2_BASED_LIBCAM),true)
+ ifeq ($(TARGET_BOARD_PLATFORM),msm7627a)
+ LOCAL_HAL_FILES := QCameraHAL.cpp QCameraHWI_Parm.cpp\
+ QCameraHWI.cpp QCameraHWI_Preview.cpp \
+ QCameraHWI_Record_7x27A.cpp QCameraHWI_Still.cpp \
+ QCameraHWI_Mem.cpp QCameraHWI_Display.cpp \
+ QCameraStream.cpp QualcommCamera2.cpp
+ else
+ LOCAL_HAL_FILES := QCameraHAL.cpp QCameraHWI_Parm.cpp\
+ QCameraHWI.cpp QCameraHWI_Preview.cpp \
+ QCameraHWI_Record.cpp QCameraHWI_Still.cpp \
+ QCameraHWI_Mem.cpp QCameraHWI_Display.cpp \
+ QCameraStream.cpp QualcommCamera2.cpp QCameraParameters.cpp
+ endif
+
+ else
+ LOCAL_HAL_FILES := QualcommCamera.cpp QualcommCameraHardware.cpp
+ endif
+
+ LOCAL_CFLAGS+= -DHW_ENCODE
+
+ # If debugging the service layer and up, use the stub camera!
+ LOCAL_C_INCLUDES += \
+ frameworks/base/services/camera/libcameraservice #
+
+ LOCAL_SRC_FILES := $(MM_CAM_FILES) $(LOCAL_HAL_FILES)
+
+ ifeq ($(TARGET_BOARD_PLATFORM),msm7627a)
+ LOCAL_CFLAGS+= -DNUM_PREVIEW_BUFFERS=6 -D_ANDROID_
+ else
+ LOCAL_CFLAGS+= -DNUM_PREVIEW_BUFFERS=4 -D_ANDROID_
+ endif
+
+ # To Choose neon/C routines for YV12 conversion
+ LOCAL_CFLAGS+= -DUSE_NEON_CONVERSION
+ # Uncomment below line to enable smooth zoom
+ #LOCAL_CFLAGS+= -DCAMERA_SMOOTH_ZOOM
+
+ ifeq ($(V4L2_BASED_LIBCAM),true)
+ LOCAL_C_INCLUDES+= hardware/qcom/media/mm-core/inc
+ LOCAL_C_INCLUDES+= $(LOCAL_PATH)/mm-camera-interface
+ endif
+
+ LOCAL_C_INCLUDES+= hardware/qcom/display/libgralloc
+ LOCAL_C_INCLUDES+= hardware/qcom/display/libgenlock
+ LOCAL_C_INCLUDES+= hardware/qcom/media/libstagefrighthw
+
+
+ ifeq ($(V4L2_BASED_LIBCAM),true)
+ LOCAL_SHARED_LIBRARIES:= libutils libui libcamera_client liblog libcutils
+ LOCAL_SHARED_LIBRARIES += libmmcamera_interface2
+ else
+ LOCAL_SHARED_LIBRARIES:= libutils libui libcamera_client liblog libcutils libmmjpeg
+ endif
+
+ LOCAL_SHARED_LIBRARIES+= libgenlock libbinder
+ ifneq ($(DLOPEN_LIBMMCAMERA),1)
+ LOCAL_SHARED_LIBRARIES+= liboemcamera
+ else
+ LOCAL_SHARED_LIBRARIES+= libdl
+ endif
+
+ LOCAL_CFLAGS += -include bionic/libc/include/sys/socket.h
+
+ LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+ LOCAL_MODULE:= camera.$(TARGET_DEVICE)
+ LOCAL_MODULE_TAGS := optional
+ include $(BUILD_SHARED_LIBRARY)
+
+ endif # BUILD_TINY_ANDROID
+
+ifeq ($(V4L2_BASED_LIBCAM),true)
+include $(LOCAL_PATH)/mm-camera-interface/Android.mk
+endif
+
+#Enable only to compile new interface and HAL files.
+ifeq ($(V4L2_BASED_LIBCAM),true)
+#include $(LOCAL_PATH1)/QCamera/Android.mk
+endif
+
+endif # USE_CAMERA_STUB
+endif
+endif
diff --git a/camera/CleanSpec.mk b/camera/CleanSpec.mk
new file mode 100644
index 0000000..bb86ad0
--- /dev/null
+++ b/camera/CleanSpec.mk
@@ -0,0 +1,47 @@
+# Copyright (C) 2007 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# If you don't need to do a full clean build but would like to touch
+# a file or delete some intermediate files, add a clean step to the end
+# of the list. These steps will only be run once, if they haven't been
+# run before.
+#
+# E.g.:
+# $(call add-clean-step, touch -c external/sqlite/sqlite3.h)
+# $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libz_intermediates)
+#
+# Always use "touch -c" and "rm -f" or "rm -rf" to gracefully deal with
+# files that are missing or have been moved.
+#
+# Use $(PRODUCT_OUT) to get to the "out/target/product/blah/" directory.
+# Use $(OUT_DIR) to refer to the "out" directory.
+#
+# If you need to re-do something that's already mentioned, just copy
+# the command and add it to the bottom of the list. E.g., if a change
+# that you made last week required touching a file and a change you
+# made today requires touching the same file, just copy the old
+# touch step and add it to the end of the list.
+#
+# ************************************************
+# NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
+# ************************************************
+
+# For example:
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/APPS/AndroidTests_intermediates)
+#$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
+#$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
+#$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
+
+$(call add-clean-step, find $(OUT_DIR) -name "camera.msm8960*" -print0 | xargs -0 rm -rf)
diff --git a/camera/MODULE_LICENSE_BSD b/camera/MODULE_LICENSE_BSD
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/camera/MODULE_LICENSE_BSD
diff --git a/camera/Makefile.am b/camera/Makefile.am
new file mode 100644
index 0000000..751e362
--- /dev/null
+++ b/camera/Makefile.am
@@ -0,0 +1,75 @@
+ACLOCAL_AMFLAGS = -I m4
+
+#libcamera.so
+lib_LTLIBRARIES = libcamera.la
+
+libcamera_la_CFLAGS := $(DEBUG_CFLAGS)
+libcamera_la_CFLAGS += -DAMSS_VERSION=$(AMSS_VERSION)
+libcamera_la_CFLAGS += -DMSM_CAMERA_GCC
+libcamera_la_CFLAGS += -DLINUX_ENABLED
+libcamera_la_CFLAGS += -Dstrlcpy=g_strlcpy
+libcamera_la_CFLAGS += -fPIC
+
+LOCAL_CFLAGS:= -DDLOPEN_LIBMMCAMERA=1
+
+libcamera_la_CFLAGS += -DHW_ENCODE
+
+if MSM7X27A
+libcamera_la_CFLAGS+= -DNUM_PREVIEW_BUFFERS=6
+libcamera_la_CFLAGS+= -DVFE_7X27A
+else
+libcamera_la_CFLAGS+= -DNUM_PREVIEW_BUFFERS=4
+endif
+
+# To Choose neon/C routines for YV12 conversion
+libcamera_la_CFLAGS+= -DUSE_NEON_CONVERSION
+# Uncomment below line to enable smooth zoom
+#libcamera_la_CFLAGS+= -DCAMERA_SMOOTH_ZOOM
+
+libcamera_la_SOURCES := mm_camera.c
+libcamera_la_SOURCES += mm_camera_channel.c
+libcamera_la_SOURCES += mm_camera_helper.c
+libcamera_la_SOURCES += mm_camera_interface2.c
+libcamera_la_SOURCES += mm_camera_notify.c
+libcamera_la_SOURCES += mm_camera_poll_thread.c
+libcamera_la_SOURCES += mm_camera_sock.c
+libcamera_la_SOURCES += mm_camera_stream.c
+
+if BUILD_JPEG
+libcamera_la_SOURCES += mm_jpeg_encoder.c
+libcamera_la_SOURCES += mm_omx_jpeg_encoder.c
+endif
+
+if BUILD_UNIFIED_CODE
+if MSM8960
+libcamera_la_SOURCES += QCameraHAL.cpp
+libcamera_la_SOURCES += QCameraHWI_Parm.cpp
+libcamera_la_SOURCES += QCameraHWI.cpp
+libcamera_la_SOURCES += QCameraHWI_Preview.cpp
+libcamera_la_SOURCES += QCameraHWI_Record.cpp
+libcamera_la_SOURCES += QCameraHWI_Still.cpp
+libcamera_la_SOURCES += QCameraHWI_Mem.cpp
+libcamera_la_SOURCES += QCameraHWI_Display.cpp
+libcamera_la_SOURCES += QCameraStream.cpp
+libcamera_la_SOURCES += QualcommCamera2.cpp
+elif MSM7X27A
+libcamera_la_SOURCES += QCameraHAL.cpp
+libcamera_la_SOURCES += QCameraHWI_Parm.cpp
+libcamera_la_SOURCES += QCameraHWI.cpp
+libcamera_la_SOURCES += QCameraHWI_Preview_7x27A.cpp
+libcamera_la_SOURCES += QCameraHWI_Record_7x27A.cpp
+libcamera_la_SOURCES += QCameraHWI_Still.cpp
+libcamera_la_SOURCES += QCameraHWI_Mem.cpp
+libcamera_la_SOURCES += QCameraHWI_Display.cpp
+libcamera_la_SOURCES += QCameraStream.cpp
+libcamera_la_SOURCES += QualcommCamera2.cpp
+endif
+endif
+
+libcamera_la_LDFLAGS := $(DEBUG_LDFLAGS)
+libcamera_la_LDFLAGS += -shared
+libcamera_la_LIBADD = -ldl
+
+dirs :=
+SUBDIRS = $(dirs)
+
diff --git a/camera/QCamera/Android.mk b/camera/QCamera/Android.mk
new file mode 100644
index 0000000..c36c538
--- /dev/null
+++ b/camera/QCamera/Android.mk
@@ -0,0 +1,3 @@
+ifeq ($(TARGET_ARCH),arm)
+ include $(call all-subdir-makefiles)
+endif
diff --git a/camera/QCamera/HAL/Android.mk b/camera/QCamera/HAL/Android.mk
new file mode 100644
index 0000000..e520431
--- /dev/null
+++ b/camera/QCamera/HAL/Android.mk
@@ -0,0 +1,3 @@
+LOCAL_PATH:= $(call my-dir)
+#include $(LOCAL_PATH)/core/Android.mk
+#include $(LOCAL_PATH)/test/Android.mk
diff --git a/camera/QCamera/HAL/core/Android.mk b/camera/QCamera/HAL/core/Android.mk
new file mode 100644
index 0000000..f8ea0aa
--- /dev/null
+++ b/camera/QCamera/HAL/core/Android.mk
@@ -0,0 +1,91 @@
+
+#ifeq ($(call is-board-platform,msm8960),true)
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+
+include $(CLEAR_VARS)
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+DLOPEN_LIBMMCAMERA:=0
+
+LOCAL_CFLAGS:= -DDLOPEN_LIBMMCAMERA=$(DLOPEN_LIBMMCAMERA)
+
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_CP_MM_HEAP_ID # 8660=SMI, Rest=EBI
+LOCAL_CFLAGS += -DCAMERA_ZSL_ION_HEAP_ID=ION_CP_MM_HEAP_ID
+
+LOCAL_CFLAGS+= -DHW_ENCODE
+LOCAL_CFLAGS+= -DUSE_NEON_CONVERSION
+
+ifeq ($(call is-board-platform,msm8960),true)
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_MM_HEAP
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_IOMMU_HEAP
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_ZSL_ION_FALLBACK_HEAP_ID=ION_IOMMU_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+else ifeq ($(call is-chipset-prefix-in-board-platform,msm8660),true)
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_CAMERA_HEAP
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_CAMERA_HEAP # Don't Care
+ LOCAL_CFLAGS += -DCAMERA_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID # EBI
+ LOCAL_CFLAGS += -DCAMERA_ZSL_ION_FALLBACK_HEAP_ID=ION_CAMERA_HEAP_ID
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=0
+else
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_HEAP_ID=GRALLOC_USAGE_PRIVATE_ADSP_HEAP
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_FALLBACK_HEAP_ID=GRALLOC_USAGE_PRIVATE_ADSP_HEAP # Don't Care
+ LOCAL_CFLAGS += -DCAMERA_GRALLOC_CACHING_ID=GRALLOC_USAGE_PRIVATE_UNCACHED #uncached
+endif
+
+LOCAL_HAL_FILES := \
+ src/QCameraHAL.cpp \
+ src/QCameraHWI_Parm.cpp \
+ src/QCameraHWI.cpp \
+ src/QCameraHWI_Preview.cpp \
+ src/QCameraHWI_Record.cpp \
+ src/QCameraHWI_Still.cpp \
+ src/QCameraHWI_Mem.cpp \
+ src/QCameraStream.cpp
+
+LOCAL_HAL_WRAPPER_FILES := ../wrapper/QualcommCamera.cpp
+
+LOCAL_C_INCLUDES := \
+ $(LOCAL_PATH)/../wrapper \
+ $(LOCAL_PATH)/inc \
+ $(TARGET_OUT_INTERMEDIATES)/include/mm-camera-interface_badger \
+
+# may need remove this includes
+LOCAL_C_INCLUDES += $(TARGET_OUT_HEADERS)/mm-camera
+LOCAL_C_INCLUDES += $(TARGET_OUT_HEADERS)/mm-still \
+ $(TARGET_OUT_HEADERS)/mm-still/jpeg
+#end
+
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include/media
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+
+LOCAL_C_INCLUDES += hardware/qcom/display/libgralloc \
+ hardware/qcom/display/libgenlock \
+ hardware/qcom/media/libstagefrighthw
+
+# If debugging the service layer and up, use the stub camera!
+LOCAL_C_INCLUDES += \
+ frameworks/base/services/camera/libcameraservice
+
+LOCAL_SRC_FILES := \
+ $(LOCAL_HAL_WRAPPER_FILES) \
+ $(LOCAL_HAL_FILES)
+
+LOCAL_SHARED_LIBRARIES := libutils libui libcamera_client liblog libcutils
+LOCAL_SHARED_LIBRARIES += libmmcamera_interface_badger
+LOCAL_SHARED_LIBRARIES+= libgenlock libbinder
+
+LOCAL_CFLAGS += -include bionic/libc/include/sys/socket.h
+
+LOCAL_MODULE_PATH := $(TARGET_OUT_SHARED_LIBRARIES)/hw
+LOCAL_MODULE:= camera_badger.$(TARGET_BOARD_PLATFORM)
+LOCAL_MODULE_TAGS := optional
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
+
diff --git a/camera/QCamera/HAL/core/inc/QCameraHAL.h b/camera/QCamera/HAL/core/inc/QCameraHAL.h
new file mode 100644
index 0000000..4450e4f
--- /dev/null
+++ b/camera/QCamera/HAL/core/inc/QCameraHAL.h
@@ -0,0 +1,37 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_HAL_H
+#define ANDROID_HARDWARE_QCAMERA_HAL_H
+
+
+#include "QCameraHWI.h"
+
+extern "C" {
+#include <mm_camera_interface2.h>
+}
+namespace android {
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" void *
+ QCameraHAL_openCameraHardware(int cameraId, int mode);
+extern "C" int HAL_getNumberOfCameras();
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo);
+
+}; // namespace android
+
+#endif
+
diff --git a/camera/QCamera/HAL/core/inc/QCameraHWI.h b/camera/QCamera/HAL/core/inc/QCameraHWI.h
new file mode 100644
index 0000000..a666383
--- /dev/null
+++ b/camera/QCamera/HAL/core/inc/QCameraHWI.h
@@ -0,0 +1,834 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_HARDWARE_INTERFACE_H
+#define ANDROID_HARDWARE_QCAMERA_HARDWARE_INTERFACE_H
+
+
+#include <utils/threads.h>
+//#include <camera/CameraHardwareInterface.h>
+#include <hardware/camera.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <binder/MemoryHeapPmem.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <camera/Camera.h>
+#include <camera/CameraParameters.h>
+#include <system/window.h>
+#include <system/camera.h>
+#include <hardware/camera.h>
+#include <gralloc_priv.h>
+#include <QComOMXMetadata.h>
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/ion.h>
+#include <mm_camera_interface2.h>
+
+#include "mm_omx_jpeg_encoder.h"
+
+} //extern C
+
+#include "QCameraHWI_Mem.h"
+#include "QCameraStream.h"
+
+//Error codes
+#define NOT_FOUND -1
+#define MAX_ZOOM_RATIOS 62
+
+#ifdef Q12
+#undef Q12
+#endif
+
+#define Q12 4096
+#define QCAMERA_PARM_ENABLE 1
+#define QCAMERA_PARM_DISABLE 0
+#define PREVIEW_TBL_MAX_SIZE 14
+#define VIDEO_TBL_MAX_SIZE 14
+#define THUMB_TBL_MAX_SIZE 16
+#define HFR_TBL_MAX_SIZE 2
+
+struct str_map {
+ const char *const desc;
+ int val;
+};
+
+struct preview_format_info_t {
+ int Hal_format;
+ cam_format_t mm_cam_format;
+ cam_pad_format_t padding;
+ int num_planar;
+};
+
+typedef enum {
+ CAMERA_STATE_UNINITED,
+ CAMERA_STATE_READY,
+ CAMERA_STATE_PREVIEW_START_CMD_SENT,
+ CAMERA_STATE_PREVIEW_STOP_CMD_SENT,
+ CAMERA_STATE_PREVIEW,
+ CAMERA_STATE_RECORD_START_CMD_SENT, /*5*/
+ CAMERA_STATE_RECORD_STOP_CMD_SENT,
+ CAMERA_STATE_RECORD,
+ CAMERA_STATE_SNAP_START_CMD_SENT,
+ CAMERA_STATE_SNAP_STOP_CMD_SENT,
+ CAMERA_STATE_SNAP_CMD_ACKED, /*10 - snapshot cmd acked, snapshot not done yet*/
+ CAMERA_STATE_ZSL_START_CMD_SENT,
+ CAMERA_STATE_ZSL,
+ CAMERA_STATE_AF_START_CMD_SENT,
+ CAMERA_STATE_AF_STOP_CMD_SENT,
+ CAMERA_STATE_ERROR, /*15*/
+
+ /*Add any new state above*/
+ CAMERA_STATE_MAX
+} HAL_camera_state_type_t;
+
+enum {
+ BUFFER_NOT_OWNED,
+ BUFFER_UNLOCKED,
+ BUFFER_LOCKED,
+};
+
+typedef enum {
+ HAL_DUMP_FRM_PREVIEW = 1,
+ HAL_DUMP_FRM_VIDEO = 1<<1,
+ HAL_DUMP_FRM_MAIN = 1<<2,
+ HAL_DUMP_FRM_THUMBNAIL = 1<<3,
+
+ /*8 bits mask*/
+ HAL_DUMP_FRM_MAX = 1 << 8
+} HAL_cam_dump_frm_type_t;
+
+
+typedef enum {
+ HAL_CAM_MODE_ZSL = 1,
+
+ /*add new entry before and update the max entry*/
+ HAL_CAM_MODE_MAX = HAL_CAM_MODE_ZSL << 1,
+} qQamera_mode_t;
+
+#define HAL_DUMP_FRM_MASK_ALL ( HAL_DUMP_FRM_PREVIEW + HAL_DUMP_FRM_VIDEO + \
+ HAL_DUMP_FRM_MAIN + HAL_DUMP_FRM_THUMBNAIL)
+#define QCAMERA_HAL_PREVIEW_STOPPED 0
+#define QCAMERA_HAL_PREVIEW_START 1
+#define QCAMERA_HAL_PREVIEW_STARTED 2
+#define QCAMERA_HAL_RECORDING_STARTED 3
+#define QCAMERA_HAL_TAKE_PICTURE 4
+
+
+typedef struct {
+ int buffer_count;
+ buffer_handle_t *buffer_handle[MM_CAMERA_MAX_NUM_FRAMES];
+ struct private_handle_t *private_buffer_handle[MM_CAMERA_MAX_NUM_FRAMES];
+ int stride[MM_CAMERA_MAX_NUM_FRAMES];
+ uint32_t addr_offset[MM_CAMERA_MAX_NUM_FRAMES];
+ uint8_t local_flag[MM_CAMERA_MAX_NUM_FRAMES];
+ camera_memory_t *camera_memory[MM_CAMERA_MAX_NUM_FRAMES];
+ int main_ion_fd[MM_CAMERA_MAX_NUM_FRAMES];
+ struct ion_fd_data ion_info_fd[MM_CAMERA_MAX_NUM_FRAMES];
+} QCameraHalMemory_t;
+
+
+typedef struct {
+ int buffer_count;
+ uint32_t size;
+ uint32_t y_offset;
+ uint32_t cbcr_offset;
+ int fd[MM_CAMERA_MAX_NUM_FRAMES];
+ int local_flag[MM_CAMERA_MAX_NUM_FRAMES];
+ camera_memory_t* camera_memory[MM_CAMERA_MAX_NUM_FRAMES];
+ camera_memory_t* metadata_memory[MM_CAMERA_MAX_NUM_FRAMES];
+ int main_ion_fd[MM_CAMERA_MAX_NUM_FRAMES];
+ struct ion_allocation_data alloc[MM_CAMERA_MAX_NUM_FRAMES];
+ struct ion_fd_data ion_info_fd[MM_CAMERA_MAX_NUM_FRAMES];
+} QCameraHalHeap_t;
+
+typedef struct {
+ camera_memory_t* camera_memory[3];
+ int main_ion_fd[3];
+ struct ion_allocation_data alloc[3];
+ struct ion_fd_data ion_info_fd[3];
+ int fd[3];
+ int size;
+} QCameraStatHeap_t;
+
+typedef struct {
+ int32_t msg_type;
+ int32_t ext1;
+ int32_t ext2;
+ void *cookie;
+} argm_notify_t;
+
+typedef struct {
+ int32_t msg_type;
+ camera_memory_t *data;
+ unsigned int index;
+ camera_frame_metadata_t *metadata;
+ void *cookie;
+} argm_data_cb_t;
+
+typedef struct {
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb;
+ argm_notify_t argm_notify;
+ argm_data_cb_t argm_data_cb;
+} app_notify_cb_t;
+
+/* camera_area_t
+ * rectangle with weight to store the focus and metering areas.
+ * x1, y1, x2, y2: from -1000 to 1000
+ * weight: 0 to 1000
+ */
+typedef struct {
+ int x1, y1, x2, y2;
+ int weight;
+} camera_area_t;
+
+//EXIF globals
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 }; // "ASCII\0\0\0"
+static const char ExifUndefinedPrefix[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; // "\0\0\0\0\0\0\0\0"
+
+//EXIF defines
+#define MAX_EXIF_TABLE_ENTRIES 14
+#define GPS_PROCESSING_METHOD_SIZE 101
+#define FOCAL_LENGTH_DECIMAL_PRECISION 100
+#define EXIF_ASCII_PREFIX_SIZE 8 //(sizeof(ExifAsciiPrefix))
+
+typedef struct{
+ //GPS tags
+ rat_t latitude[3];
+ rat_t longitude[3];
+ char lonRef[2];
+ char latRef[2];
+ rat_t altitude;
+ rat_t gpsTimeStamp[3];
+ char gpsDateStamp[20];
+ char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE+GPS_PROCESSING_METHOD_SIZE];
+ //Other tags
+ char dateTime[20];
+ rat_t focalLength;
+ uint16_t flashMode;
+ uint16_t isoSpeed;
+
+ bool mAltitude;
+ bool mLongitude;
+ bool mLatitude;
+ bool mTimeStamp;
+ bool mGpsProcess;
+
+ int mAltitude_ref;
+ long mGPSTimestamp;
+
+} exif_values_t;
+
+namespace android {
+
+class QCameraStream;
+
+class QCameraHardwareInterface : public virtual RefBase {
+public:
+
+ QCameraHardwareInterface(int cameraId, int mode);
+
+ /** Set the ANativeWindow to which preview frames are sent */
+ int setPreviewWindow(preview_stream_ops_t* window);
+
+ /** Set the notification and data callbacks */
+ void setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ /**
+ * The following three functions all take a msg_type, which is a bitmask of
+ * the messages defined in include/ui/Camera.h
+ */
+
+ /**
+ * Enable a message, or set of messages.
+ */
+ void enableMsgType(int32_t msg_type);
+
+ /**
+ * Disable a message, or a set of messages.
+ *
+ * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera
+ * HAL should not rely on its client to call releaseRecordingFrame() to
+ * release video recording frames sent out by the camera HAL before and
+ * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
+ * clients must not modify/access any video recording frame after calling
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
+ */
+ void disableMsgType(int32_t msg_type);
+
+ /**
+ * Query whether a message, or a set of messages, is enabled. Note that
+ * this operates as an AND: if any of the messages queried are off, this
+ * will return false.
+ */
+ int msgTypeEnabled(int32_t msg_type);
+
+ /**
+ * Start preview mode.
+ */
+ int startPreview();
+ int startPreview2();
+
+ /**
+ * Stop a previously started preview.
+ */
+ void stopPreview();
+
+ /**
+ * Returns true if preview is enabled.
+ */
+ int previewEnabled();
+
+
+ /**
+ * Request the camera HAL to store meta data or real YUV data in the video
+ * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
+ * it is not called, the default camera HAL behavior is to store real YUV
+ * data in the video buffers.
+ *
+ * This method should be called before startRecording() in order to be
+ * effective.
+ *
+ * If meta data is stored in the video buffers, it is up to the receiver of
+ * the video buffers to interpret the contents and to find the actual frame
+ * data with the help of the meta data in the buffer. How this is done is
+ * outside of the scope of this method.
+ *
+ * Some camera HALs may not support storing meta data in the video buffers,
+ * but all camera HALs should support storing real YUV data in the video
+ * buffers. If the camera HAL does not support storing the meta data in the
+ * video buffers when it is requested to do so, INVALID_OPERATION must be
+ * returned. It is very useful for the camera HAL to pass meta data rather
+ * than the actual frame data directly to the video encoder, since the
+ * amount of the uncompressed frame data can be very large if video size is
+ * large.
+ *
+ * @param enable if true to instruct the camera HAL to store
+ * meta data in the video buffers; false to instruct
+ * the camera HAL to store real YUV data in the video
+ * buffers.
+ *
+ * @return OK on success.
+ */
+ int storeMetaDataInBuffers(int enable);
+
+ /**
+ * Start record mode. When a record image is available, a
+ * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
+ * frame. Every record frame must be released by a camera HAL client via
+ * releaseRecordingFrame() before the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+ * responsibility to manage the life-cycle of the video recording frames,
+ * and the client must not modify/access any video recording frames.
+ */
+ int startRecording();
+
+ /**
+ * Stop a previously started recording.
+ */
+ void stopRecording();
+
+ /**
+ * Returns true if recording is enabled.
+ */
+ int recordingEnabled();
+
+ /**
+ * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+ *
+ * It is camera HAL client's responsibility to release video recording
+ * frames sent out by the camera HAL before the camera HAL receives a call
+ * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+ * responsibility to manage the life-cycle of the video recording frames.
+ */
+ void releaseRecordingFrame(const void *opaque);
+
+ /**
+ * Start auto focus, the notification callback routine is called with
+ * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be
+ * called again if another auto focus is needed.
+ */
+ int autoFocus();
+
+ /**
+ * Cancels auto-focus function. If the auto-focus is still in progress,
+ * this function will cancel it. Whether the auto-focus is in progress or
+ * not, this function will return the focus position to the default. If
+ * the camera does not support auto-focus, this is a no-op.
+ */
+ int cancelAutoFocus();
+
+ /**
+ * Take a picture.
+ */
+ int takePicture();
+
+ /**
+ * Cancel a picture that was started with takePicture. Calling this method
+ * when no picture is being taken is a no-op.
+ */
+ int cancelPicture();
+
+ /**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported.
+ */
+ int setParameters(const char *parms);
+
+ //status_t setParameters(const CameraParameters& params);
+ /** Retrieve the camera parameters. The buffer returned by the camera HAL
+ must be returned back to it with put_parameters, if put_parameters
+ is not NULL.
+ */
+ int getParameters(char **parms);
+
+ /** The camera HAL uses its own memory to pass us the parameters when we
+ call get_parameters. Use this function to return the memory back to
+ the camera HAL, if put_parameters is not NULL. If put_parameters
+ is NULL, then you have to use free() to release the memory.
+ */
+ void putParameters(char *);
+
+ /**
+ * Send command to camera driver.
+ */
+ int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+
+ /**
+ * Release the hardware resources owned by this object. Note that this is
+ * *not* done in the destructor.
+ */
+ void release();
+
+ /**
+ * Dump state of the camera hardware
+ */
+ int dump(int fd);
+
+ //virtual sp<IMemoryHeap> getPreviewHeap() const;
+ //virtual sp<IMemoryHeap> getRawHeap() const;
+
+
+ status_t takeLiveSnapshot();
+ status_t takeFullSizeLiveshot();
+ bool canTakeFullSizeLiveshot();
+
+ //virtual status_t getBufferInfo( sp<IMemory>& Frame,
+ //size_t *alignedSize);
+ void getPictureSize(int *picture_width, int *picture_height) const;
+ void getPreviewSize(int *preview_width, int *preview_height) const;
+ cam_format_t getPreviewFormat() const;
+
+ cam_pad_format_t getPreviewPadding() const;
+
+ //bool useOverlay(void);
+ //virtual status_t setOverlay(const sp<Overlay> &overlay);
+ void processEvent(mm_camera_event_t *);
+ int getJpegQuality() const;
+ int getNumOfSnapshots(void) const;
+ int getNumOfSnapshots(const CameraParameters& params);
+ int getThumbSizesFromAspectRatio(uint32_t aspect_ratio,
+ int *picture_width,
+ int *picture_height);
+ bool isRawSnapshot();
+ bool mShutterSoundPlayed;
+ void dumpFrameToFile(struct msm_frame*, HAL_cam_dump_frm_type_t);
+
+ static QCameraHardwareInterface *createInstance(int, int);
+ status_t setZSLBurstLookBack(const CameraParameters& params);
+ status_t setZSLBurstInterval(const CameraParameters& params);
+ int getZSLBurstInterval(void);
+ int getZSLQueueDepth(void) const;
+ int getZSLBackLookCount(void) const;
+
+ ~QCameraHardwareInterface();
+ int initHeapMem(QCameraHalHeap_t *heap, int num_of_buf, int pmem_type,
+ int frame_len, int cbcr_off, int y_off, mm_cameara_stream_buf_t *StreamBuf,
+ mm_camera_buf_def_t *buf_def, uint8_t num_planes, uint32_t *planes);
+
+ int releaseHeapMem( QCameraHalHeap_t *heap);
+ status_t sendMappingBuf(int ext_mode, int idx, int fd, uint32_t size,
+ int cameraid, mm_camera_socket_msg_type msg_type);
+ status_t sendUnMappingBuf(int ext_mode, int idx, int cameraid,
+ mm_camera_socket_msg_type msg_type);
+
+ int allocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt,
+ int ion_type);
+ int deallocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt);
+
+ int allocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt,
+ int ion_type);
+ int deallocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt);
+
+ int cache_ops(int ion_fd, struct ion_flush_data *cache_inv_data, int type);
+
+ void dumpFrameToFile(const void * data, uint32_t size, char* name,
+ char* ext, int index);
+ preview_format_info_t getPreviewFormatInfo( );
+ bool isCameraReady();
+ bool isNoDisplayMode();
+
+private:
+ int16_t zoomRatios[MAX_ZOOM_RATIOS];
+ struct camera_size_type default_preview_sizes[PREVIEW_TBL_MAX_SIZE];
+ struct camera_size_type default_video_sizes[VIDEO_TBL_MAX_SIZE];
+ struct camera_size_type default_hfr_sizes[HFR_TBL_MAX_SIZE];
+ struct camera_size_type default_thumbnail_sizes[THUMB_TBL_MAX_SIZE];
+ unsigned int preview_sizes_count;
+ unsigned int video_sizes_count;
+ unsigned int thumbnail_sizes_count;
+ unsigned int hfr_sizes_count;
+
+
+ bool mUseOverlay;
+
+ void loadTables();
+ void initDefaultParameters();
+ bool getMaxPictureDimension(mm_camera_dimension_t *dim);
+
+ status_t updateFocusDistances();
+
+ bool native_set_parms(mm_camera_parm_type_t type, uint16_t length, void *value);
+ bool native_set_parms( mm_camera_parm_type_t type, uint16_t length, void *value, int *result);
+
+ void hasAutoFocusSupport();
+ void debugShowPreviewFPS() const;
+ //void prepareSnapshotAndWait();
+
+ bool isPreviewRunning();
+ bool isRecordingRunning();
+ bool isSnapshotRunning();
+
+ void processChannelEvent(mm_camera_ch_event_t *, app_notify_cb_t *);
+ void processPreviewChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+ void processRecordChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+ void processSnapshotChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+ void processCtrlEvent(mm_camera_ctrl_event_t *, app_notify_cb_t *);
+ void processStatsEvent(mm_camera_stats_event_t *, app_notify_cb_t *);
+ void processInfoEvent(mm_camera_info_event_t *event, app_notify_cb_t *);
+ void processprepareSnapshotEvent(cam_ctrl_status_t *);
+ void roiEvent(fd_roi_t roi, app_notify_cb_t *);
+ void zoomEvent(cam_ctrl_status_t *status, app_notify_cb_t *);
+ void autofocusevent(cam_ctrl_status_t *status, app_notify_cb_t *);
+ void handleZoomEventForPreview(app_notify_cb_t *);
+ void handleZoomEventForSnapshot(void);
+ status_t autoFocusEvent(cam_ctrl_status_t *, app_notify_cb_t *);
+
+ void filterPictureSizes();
+ bool supportsSceneDetection();
+ bool supportsSelectableZoneAf();
+ bool supportsFaceDetection();
+ bool supportsRedEyeReduction();
+ bool preview_parm_config (cam_ctrl_dimension_t* dim,CameraParameters& parm);
+
+ void stopPreviewInternal();
+ void stopRecordingInternal();
+ //void stopPreviewZSL();
+ status_t cancelPictureInternal();
+ //status_t startPreviewZSL();
+ void pausePreviewForSnapshot();
+ void pausePreviewForZSL();
+ status_t resumePreviewAfterSnapshot();
+
+ status_t runFaceDetection();
+
+ status_t setParameters(const CameraParameters& params);
+ CameraParameters& getParameters() ;
+
+ status_t setCameraMode(const CameraParameters& params);
+ status_t setPictureSizeTable(void);
+ status_t setPreviewSizeTable(void);
+ status_t setVideoSizeTable(void);
+ status_t setPreviewSize(const CameraParameters& params);
+ status_t setJpegThumbnailSize(const CameraParameters& params);
+ status_t setPreviewFpsRange(const CameraParameters& params);
+ status_t setPreviewFrameRate(const CameraParameters& params);
+ status_t setPreviewFrameRateMode(const CameraParameters& params);
+ status_t setVideoSize(const CameraParameters& params);
+ status_t setPictureSize(const CameraParameters& params);
+ status_t setJpegQuality(const CameraParameters& params);
+ status_t setNumOfSnapshot(const CameraParameters& params);
+ status_t setJpegRotation(int isZSL);
+ int getJpegRotation(void);
+ int getISOSpeedValue();
+ status_t setAntibanding(const CameraParameters& params);
+ status_t setEffect(const CameraParameters& params);
+    status_t setExposureCompensation(const CameraParameters& params);
+ status_t setAutoExposure(const CameraParameters& params);
+ status_t setWhiteBalance(const CameraParameters& params);
+ status_t setFlash(const CameraParameters& params);
+ status_t setGpsLocation(const CameraParameters& params);
+ status_t setRotation(const CameraParameters& params);
+ status_t setZoom(const CameraParameters& params);
+ status_t setFocusMode(const CameraParameters& params);
+ status_t setBrightness(const CameraParameters& params);
+ status_t setSkinToneEnhancement(const CameraParameters& params);
+ status_t setOrientation(const CameraParameters& params);
+ status_t setLensshadeValue(const CameraParameters& params);
+ status_t setMCEValue(const CameraParameters& params);
+ status_t setISOValue(const CameraParameters& params);
+ status_t setPictureFormat(const CameraParameters& params);
+ status_t setSharpness(const CameraParameters& params);
+ status_t setContrast(const CameraParameters& params);
+ status_t setSaturation(const CameraParameters& params);
+ status_t setWaveletDenoise(const CameraParameters& params);
+ status_t setSceneMode(const CameraParameters& params);
+ status_t setContinuousAf(const CameraParameters& params);
+ status_t setFaceDetection(const char *str);
+ status_t setSceneDetect(const CameraParameters& params);
+ status_t setStrTextures(const CameraParameters& params);
+ status_t setPreviewFormat(const CameraParameters& params);
+ status_t setSelectableZoneAf(const CameraParameters& params);
+ status_t setOverlayFormats(const CameraParameters& params);
+ status_t setHighFrameRate(const CameraParameters& params);
+ status_t setRedeyeReduction(const CameraParameters& params);
+ status_t setAEBracket(const CameraParameters& params);
+ status_t setFaceDetect(const CameraParameters& params);
+ status_t setDenoise(const CameraParameters& params);
+ status_t setAecAwbLock(const CameraParameters & params);
+ status_t setHistogram(int histogram_en);
+ status_t setRecordingHint(const CameraParameters& params);
+ status_t setRecordingHintValue(const int32_t value);
+ status_t setFocusAreas(const CameraParameters& params);
+ status_t setMeteringAreas(const CameraParameters& params);
+ status_t setFullLiveshot(void);
+ status_t setDISMode(void);
+ status_t setCaptureBurstExp(void);
+ status_t setPowerMode(const CameraParameters& params);
+ void takePicturePrepareHardware( );
+ status_t setNoDisplayMode(const CameraParameters& params);
+
+ isp3a_af_mode_t getAutoFocusMode(const CameraParameters& params);
+ bool isValidDimension(int w, int h);
+
+ String8 create_values_str(const str_map *values, int len);
+
+ void setMyMode(int mode);
+ bool isZSLMode();
+ bool isWDenoiseEnabled();
+ void wdenoiseEvent(cam_ctrl_status_t status, void *cookie);
+ bool isLowPowerCamcorder();
+ void freePictureTable(void);
+ void freeVideoSizeTable(void);
+
+ int32_t createPreview();
+ int32_t createRecord();
+ int32_t createSnapshot();
+
+ int getHDRMode();
+ //EXIF
+ void addExifTag(exif_tag_id_t tagid, exif_tag_type_t type,
+ uint32_t count, uint8_t copy, void *data);
+ void setExifTags();
+ void initExifData();
+ void deinitExifData();
+ void setExifTagsGPS();
+ exif_tags_info_t* getExifData(){ return mExifData; }
+ int getExifTableNumEntries() { return mExifTableNumEntries; }
+ void parseGPSCoordinate(const char *latlonString, rat_t* coord);
+
+ int mCameraId;
+ camera_mode_t myMode;
+
+ CameraParameters mParameters;
+ //sp<Overlay> mOverlay;
+ int32_t mMsgEnabled;
+
+ camera_notify_callback mNotifyCb;
+ camera_data_callback mDataCb;
+ camera_data_timestamp_callback mDataCbTimestamp;
+ camera_request_memory mGetMemory;
+ void *mCallbackCookie;
+
+ //sp<MemoryHeapBase> mPreviewHeap; //@Guru : Need to remove
+ sp<AshmemPool> mMetaDataHeap;
+
+ mutable Mutex mLock;
+ //mutable Mutex eventLock;
+ Mutex mCallbackLock;
+ Mutex mPreviewMemoryLock;
+ Mutex mRecordingMemoryLock;
+ Mutex mAutofocusLock;
+ Mutex mMetaDataWaitLock;
+ Mutex mRecordFrameLock;
+ Mutex mRecordLock;
+ Condition mRecordWait;
+ pthread_mutex_t mAsyncCmdMutex;
+ pthread_cond_t mAsyncCmdWait;
+
+ QCameraStream *mStreamDisplay;
+ QCameraStream *mStreamRecord;
+ QCameraStream *mStreamSnap;
+ QCameraStream *mStreamLiveSnap;
+
+ cam_ctrl_dimension_t mDimension;
+ int mPreviewWidth, mPreviewHeight;
+ int videoWidth, videoHeight;
+ int thumbnailWidth, thumbnailHeight;
+ int maxSnapshotWidth, maxSnapshotHeight;
+ int mPreviewFormat;
+ int mFps;
+ int mDebugFps;
+ int mBrightness;
+ int mContrast;
+ int mBestShotMode;
+ int mEffects;
+ int mSkinToneEnhancement;
+ int mDenoiseValue;
+ int mHJR;
+ int mRotation;
+ int mJpegQuality;
+ int mThumbnailQuality;
+ int mTargetSmoothZoom;
+ int mSmoothZoomStep;
+ int mMaxZoom;
+ int mCurrentZoom;
+ int mSupportedPictureSizesCount;
+ int mFaceDetectOn;
+ int mDumpFrmCnt;
+ int mDumpSkipCnt;
+ int mFocusMode;
+
+ unsigned int mPictureSizeCount;
+ unsigned int mPreviewSizeCount;
+ int mPowerMode;
+ unsigned int mVideoSizeCount;
+
+ bool mAutoFocusRunning;
+ bool mMultiTouch;
+ bool mHasAutoFocusSupport;
+ bool mInitialized;
+ bool mDisEnabled;
+ bool strTexturesOn;
+ bool mIs3DModeOn;
+ bool mSmoothZoomRunning;
+ bool mPreparingSnapshot;
+ bool mParamStringInitialized;
+ bool mZoomSupported;
+ bool mSendMetaData;
+ bool mFullLiveshotEnabled;
+ bool mRecordingHint;
+ bool mStartRecording;
+ bool mReleasedRecordingFrame;
+ int mHdrMode;
+ int mSnapshotFormat;
+ int mZslInterval;
+ bool mRestartPreview;
+
+/*for histogram*/
+ int mStatsOn;
+ int mCurrentHisto;
+ bool mSendData;
+ sp<AshmemPool> mStatHeap;
+ camera_memory_t *mStatsMapped[3];
+ QCameraStatHeap_t mHistServer;
+ int32_t mStatSize;
+
+ bool mZslLookBackMode;
+ int mZslLookBackValue;
+ int mHFRLevel;
+ bool mZslEmptyQueueFlag;
+ String8 mEffectValues;
+ String8 mIsoValues;
+ String8 mSceneModeValues;
+ String8 mSceneDetectValues;
+ String8 mFocusModeValues;
+ String8 mSelectableZoneAfValues;
+ String8 mAutoExposureValues;
+ String8 mWhitebalanceValues;
+ String8 mAntibandingValues;
+ String8 mFrameRateModeValues;
+ String8 mTouchAfAecValues;
+ String8 mPreviewSizeValues;
+ String8 mPictureSizeValues;
+ String8 mVideoSizeValues;
+ String8 mFlashValues;
+ String8 mLensShadeValues;
+ String8 mMceValues;
+ String8 mHistogramValues;
+ String8 mSkinToneEnhancementValues;
+ String8 mPictureFormatValues;
+ String8 mDenoiseValues;
+ String8 mZoomRatioValues;
+ String8 mPreviewFrameRateValues;
+ String8 mPreviewFormatValues;
+ String8 mFaceDetectionValues;
+ String8 mHfrValues;
+ String8 mHfrSizeValues;
+ String8 mRedeyeReductionValues;
+ String8 denoise_value;
+ String8 mFpsRangesSupportedValues;
+ String8 mZslValues;
+ String8 mFocusDistance;
+
+ friend class QCameraStream;
+ friend class QCameraStream_record;
+ friend class QCameraStream_preview;
+ friend class QCameraStream_Snapshot;
+
+ camera_size_type* mPictureSizes;
+ camera_size_type* mPreviewSizes;
+ camera_size_type* mVideoSizes;
+ const camera_size_type * mPictureSizesPtr;
+ HAL_camera_state_type_t mCameraState;
+
+ /* Temporary - can be removed after Honeycomb*/
+#ifdef USE_ION
+ sp<IonPool> mPostPreviewHeap;
+#else
+ sp<PmemPool> mPostPreviewHeap;
+#endif
+ mm_cameara_stream_buf_t mPrevForPostviewBuf;
+ int mStoreMetaDataInFrame;
+ preview_stream_ops_t *mPreviewWindow;
+ Mutex mStateLock;
+ int mPreviewState;
+ /*preview memory with display case: memory is allocated and freed via
+ gralloc */
+ QCameraHalMemory_t mPreviewMemory;
+
+ /*preview memory without display case: memory is allocated
+ directly by camera */
+ QCameraHalHeap_t mNoDispPreviewMemory;
+
+ QCameraHalHeap_t mSnapshotMemory;
+ QCameraHalHeap_t mThumbnailMemory;
+ QCameraHalHeap_t mRecordingMemory;
+ QCameraHalHeap_t mJpegMemory;
+ QCameraHalHeap_t mRawMemory;
+ camera_frame_metadata_t mMetadata;
+ camera_face_t mFace[MAX_ROI];
+ preview_format_info_t mPreviewFormatInfo;
+ friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+
+ //EXIF
+ exif_tags_info_t mExifData[MAX_EXIF_TABLE_ENTRIES]; //Exif tags for JPEG encoder
+ exif_values_t mExifValues; //Exif values in usable format
+ int mExifTableNumEntries; //NUmber of entries in mExifData
+ int mNoDisplayMode;
+};
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCamera/HAL/core/inc/QCameraHWI_Mem.h b/camera/QCamera/HAL/core/inc/QCameraHWI_Mem.h
new file mode 100644
index 0000000..c1484e1
--- /dev/null
+++ b/camera/QCamera/HAL/core/inc/QCameraHWI_Mem.h
@@ -0,0 +1,110 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+#ifndef __QCAMERAHWI_MEM_H
+#define __QCAMERAHWI_MEM_H
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+#include <stdint.h>
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/ion.h>
+#include <camera_defs_i.h>
+
+}
+
+
+#define VIDEO_BUFFER_COUNT 5
+#define VIDEO_BUFFER_COUNT_LOW_POWER_CAMCORDER 9
+
+#define PREVIEW_BUFFER_COUNT 5
+
+namespace android {
+
+// This class represents a heap which maintains several contiguous
+// buffers. The heap may be backed by pmem (when pmem_pool contains
+// the name of a /dev/pmem* file), or by ashmem (when pmem_pool == NULL).
+
+struct MemPool : public RefBase {
+ MemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name);
+
+ virtual ~MemPool() = 0;
+
+ void completeInitialization();
+ bool initialized() const {
+ return mHeap != NULL && mHeap->base() != MAP_FAILED;
+ }
+
+ virtual status_t dump(int fd, const Vector<String16>& args) const;
+
+ int mBufferSize;
+ int mAlignedBufferSize;
+ int mNumBuffers;
+ int mFrameSize;
+ sp<MemoryHeapBase> mHeap;
+ sp<MemoryBase> *mBuffers;
+
+ const char *mName;
+};
+
+class AshmemPool : public MemPool {
+public:
+ AshmemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name);
+};
+
+class PmemPool : public MemPool {
+public:
+ PmemPool(const char *pmem_pool,
+ int flags, int pmem_type,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yoffset, const char *name);
+ virtual ~PmemPool();
+ int mFd;
+ int mPmemType;
+ int mCbCrOffset;
+ int myOffset;
+ int mCameraControlFd;
+ uint32_t mAlignedSize;
+ struct pmem_region mSize;
+};
+
+class IonPool : public MemPool {
+public:
+ IonPool( int flags,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yoffset, const char *name);
+ virtual ~IonPool();
+ int mFd;
+ int mCbCrOffset;
+ int myOffset;
+ int mCameraControlFd;
+ uint32_t mAlignedSize;
+private:
+ static const char mIonDevName[];
+};
+
+};
+#endif
diff --git a/camera/QCamera/HAL/core/inc/QCameraStream.h b/camera/QCamera/HAL/core/inc/QCameraStream.h
new file mode 100644
index 0000000..19ce658
--- /dev/null
+++ b/camera/QCamera/HAL/core/inc/QCameraStream.h
@@ -0,0 +1,376 @@
+/*
+** Copyright 2008, Google Inc.
+** Copyright (c) 2009-2012, The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_STREAM_H
+#define ANDROID_HARDWARE_QCAMERA_STREAM_H
+
+
+#include <utils/threads.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+
+#include "QCameraHWI.h"
+#include "QCameraHWI_Mem.h"
+
+extern "C" {
+
+#include <camera_defs_i.h>
+#include <mm_camera_interface2.h>
+
+#define DEFAULT_STREAM_WIDTH 320
+#define DEFAULT_STREAM_HEIGHT 240
+#define DEFAULT_LIVESHOT_WIDTH 2592
+#define DEFAULT_LIVESHOT_HEIGHT 1944
+
+#define MM_CAMERA_CH_PREVIEW_MASK (0x01 << MM_CAMERA_CH_PREVIEW)
+#define MM_CAMERA_CH_VIDEO_MASK (0x01 << MM_CAMERA_CH_VIDEO)
+#define MM_CAMERA_CH_SNAPSHOT_MASK (0x01 << MM_CAMERA_CH_SNAPSHOT)
+
+} /* extern C*/
+
+
+namespace android {
+
+class QCameraHardwareInterface;
+
+class StreamQueue {
+private:
+ Mutex mQueueLock;
+ Condition mQueueWait;
+ bool mInitialized;
+
+ //Vector<struct msm_frame *> mContainer;
+ Vector<void *> mContainer;
+public:
+ StreamQueue();
+ virtual ~StreamQueue();
+ bool enqueue(void *element);
+ void flush();
+ void* dequeue();
+ void init();
+ void deinit();
+ bool isInitialized();
+bool isEmpty();
+};
+
+
+class QCameraStream { //: public virtual RefBase{
+
+public:
+ bool mInit;
+ bool mActive;
+
+ virtual status_t init();
+ virtual status_t start();
+ virtual void stop();
+ virtual void release();
+
+ status_t setFormat(uint8_t ch_type_mask);
+ status_t setMode(int enable);
+
+ virtual void setHALCameraControl(QCameraHardwareInterface* ctrl);
+
+ //static status_t openChannel(mm_camera_t *, mm_camera_channel_type_t ch_type);
+ virtual status_t initChannel(int cameraId, uint32_t ch_type_mask);
+ virtual status_t deinitChannel(int cameraId, mm_camera_channel_type_t ch_type);
+ virtual void releaseRecordingFrame(const void *opaque)
+ {
+ ;
+ }
+#if 0 // mzhu
+ virtual status_t getBufferInfo(sp<IMemory>& Frame, size_t *alignedSize)
+ {
+ return NO_ERROR;
+ }
+#endif // mzhu
+ virtual void prepareHardware()
+ {
+ ;
+ }
+ virtual sp<IMemoryHeap> getHeap() const{return NULL;}
+ virtual status_t initDisplayBuffers(){return NO_ERROR;}
+ virtual status_t initPreviewOnlyBuffers(){return NO_ERROR;}
+ virtual sp<IMemoryHeap> getRawHeap() const {return NULL;}
+ virtual void *getLastQueuedFrame(void){return NULL;}
+ virtual status_t takePictureZSL(void){return NO_ERROR;}
+ virtual status_t takeLiveSnapshot(){return NO_ERROR;}
+ virtual status_t takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame,
+ cam_ctrl_dimension_t *dim,
+ int frame_len){return NO_ERROR;}
+ virtual void setModeLiveSnapshot(bool){;}
+ virtual status_t initSnapshotBuffers(cam_ctrl_dimension_t *dim,
+ int num_of_buf){return NO_ERROR;}
+
+ virtual void setFullSizeLiveshot(bool){};
+ /* Set the ANativeWindow */
+ virtual int setPreviewWindow(preview_stream_ops_t* window) {return NO_ERROR;}
+ virtual void notifyROIEvent(fd_roi_t roi) {;}
+ virtual void notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie) {;}
+ virtual void resetSnapshotCounters(void ){};
+
+ QCameraStream();
+ QCameraStream(int, camera_mode_t);
+ virtual ~QCameraStream();
+ QCameraHardwareInterface* mHalCamCtrl;
+ mm_camera_ch_crop_t mCrop;
+
+ int mCameraId;
+ camera_mode_t myMode;
+
+ mutable Mutex mStopCallbackLock;
+private:
+ StreamQueue mBusyQueue;
+ StreamQueue mFreeQueue;
+public:
+ friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+};
+
+/*
+* Record Class
+*/
+class QCameraStream_record : public QCameraStream {
+public:
+ status_t init();
+ status_t start() ;
+ void stop() ;
+ void release() ;
+
+ static QCameraStream* createInstance(int cameraId, camera_mode_t);
+ static void deleteInstance(QCameraStream *p);
+
+ QCameraStream_record() {};
+ virtual ~QCameraStream_record();
+
+ status_t processRecordFrame(void *data);
+ status_t initEncodeBuffers();
+ status_t getBufferInfo(sp<IMemory>& Frame, size_t *alignedSize);
+ //sp<IMemoryHeap> getHeap() const;
+
+ void releaseRecordingFrame(const void *opaque);
+ void debugShowVideoFPS() const;
+
+ status_t takeLiveSnapshot();
+private:
+ QCameraStream_record(int, camera_mode_t);
+ void releaseEncodeBuffer();
+
+ cam_ctrl_dimension_t dim;
+ bool mDebugFps;
+
+ mm_camera_reg_buf_t mRecordBuf;
+ //int record_frame_len;
+ //static const int maxFrameCnt = 16;
+ //camera_memory_t *mCameraMemoryPtr[maxFrameCnt];
+ //int mNumRecordFrames;
+ //sp<PmemPool> mRecordHeap[maxFrameCnt];
+ struct msm_frame *recordframes;
+ //uint32_t record_offset[VIDEO_BUFFER_COUNT];
+ mm_camera_ch_data_buf_t mRecordedFrames[MM_CAMERA_MAX_NUM_FRAMES];
+ //Mutex mRecordFreeQueueLock;
+ //Vector<mm_camera_ch_data_buf_t> mRecordFreeQueue;
+
+ int mJpegMaxSize;
+ QCameraStream *mStreamSnap;
+
+};
+
+class QCameraStream_preview : public QCameraStream {
+public:
+ status_t init();
+ status_t start() ;
+ void stop() ;
+ void release() ;
+
+ static QCameraStream* createInstance(int, camera_mode_t);
+ static void deleteInstance(QCameraStream *p);
+
+ QCameraStream_preview() {};
+ virtual ~QCameraStream_preview();
+ void *getLastQueuedFrame(void);
+ /*init preview buffers with display case*/
+ status_t initDisplayBuffers();
+ /*init preview buffers without display case*/
+ status_t initPreviewOnlyBuffers();
+
+ status_t processPreviewFrame(mm_camera_ch_data_buf_t *frame);
+
+ /*init preview buffers with display case*/
+ status_t processPreviewFrameWithDisplay(mm_camera_ch_data_buf_t *frame);
+ /*init preview buffers without display case*/
+ status_t processPreviewFrameWithOutDisplay(mm_camera_ch_data_buf_t *frame);
+
+ int setPreviewWindow(preview_stream_ops_t* window);
+ void notifyROIEvent(fd_roi_t roi);
+ friend class QCameraHardwareInterface;
+
+private:
+ QCameraStream_preview(int cameraId, camera_mode_t);
+ /*allocate and free buffers with display case*/
+ status_t getBufferFromSurface();
+ status_t putBufferToSurface();
+
+ /*allocate and free buffers without display case*/
+ status_t getBufferNoDisplay();
+ status_t freeBufferNoDisplay();
+
+ void dumpFrameToFile(struct msm_frame* newFrame);
+ bool mFirstFrameRcvd;
+
+ int8_t my_id;
+ mm_camera_op_mode_type_t op_mode;
+ cam_ctrl_dimension_t dim;
+ struct msm_frame *mLastQueuedFrame;
+ mm_camera_reg_buf_t mDisplayBuf;
+ mm_cameara_stream_buf_t mDisplayStreamBuf;
+ Mutex mDisplayLock;
+ preview_stream_ops_t *mPreviewWindow;
+ static const int kPreviewBufferCount = PREVIEW_BUFFER_COUNT;
+ mm_camera_ch_data_buf_t mNotifyBuffer[16];
+ int8_t mNumFDRcvd;
+ int mVFEOutputs;
+ int mHFRFrameCnt;
+ int mHFRFrameSkip;
+};
+
+/* Snapshot Class - handle data flow*/
+class QCameraStream_Snapshot : public QCameraStream {
+public:
+ status_t init();
+ status_t start();
+ void stop();
+ void release();
+ void prepareHardware();
+ static QCameraStream* createInstance(int cameraId, camera_mode_t);
+ static void deleteInstance(QCameraStream *p);
+
+ status_t takePictureZSL(void);
+ status_t takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame,
+ cam_ctrl_dimension_t *dim,
+ int frame_len);
+ status_t receiveRawPicture(mm_camera_ch_data_buf_t* recvd_frame);
+ void receiveCompleteJpegPicture(jpeg_event_t event);
+ void receiveJpegFragment(uint8_t *ptr, uint32_t size);
+ void deInitBuffer(void);
+ sp<IMemoryHeap> getRawHeap() const;
+ int getSnapshotState();
+ /*Temp: to be removed once event handling is enabled in mm-camera*/
+ void runSnapshotThread(void *data);
+ bool isZSLMode();
+ void setFullSizeLiveshot(bool);
+ void notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie);
+ friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+ void resetSnapshotCounters(void );
+
+private:
+ QCameraStream_Snapshot(int, camera_mode_t);
+ virtual ~QCameraStream_Snapshot();
+
+ /* snapshot related private members */
+ status_t initJPEGSnapshot(int num_of_snapshots);
+ status_t initRawSnapshot(int num_of_snapshots);
+ status_t initZSLSnapshot(void);
+ status_t initFullLiveshot(void);
+ status_t cancelPicture();
+ void notifyShutter(common_crop_t *crop,
+ bool play_shutter_sound);
+ status_t initSnapshotBuffers(cam_ctrl_dimension_t *dim,
+ int num_of_buf);
+ status_t initRawSnapshotBuffers(cam_ctrl_dimension_t *dim,
+ int num_of_buf);
+ status_t deinitRawSnapshotBuffers(void);
+ status_t deinitSnapshotBuffers(void);
+ status_t initRawSnapshotChannel(cam_ctrl_dimension_t* dim,
+ int num_snapshots);
+ status_t initSnapshotFormat(cam_ctrl_dimension_t *dim);
+ status_t takePictureRaw(void);
+ status_t takePictureJPEG(void);
+ status_t startStreamZSL(void);
+ void deinitSnapshotChannel(mm_camera_channel_type_t);
+ status_t configSnapshotDimension(cam_ctrl_dimension_t* dim);
+ status_t encodeData(mm_camera_ch_data_buf_t* recvd_frame,
+ common_crop_t *crop_info,
+ int frame_len,
+ bool enqueued);
+ status_t encodeDisplayAndSave(mm_camera_ch_data_buf_t* recvd_frame,
+ bool enqueued);
+ status_t setZSLChannelAttribute(void);
+ void handleError();
+ void setSnapshotState(int state);
+ void setModeLiveSnapshot(bool);
+ bool isLiveSnapshot(void);
+ void stopPolling(void);
+ bool isFullSizeLiveshot(void);
+ status_t doWaveletDenoise(mm_camera_ch_data_buf_t* frame);
+ status_t sendWDenoiseStartMsg(mm_camera_ch_data_buf_t * frame);
+ void lauchNextWDenoiseFromQueue();
+
+ /* Member variables */
+
+ int mSnapshotFormat;
+ int mPictureWidth;
+ int mPictureHeight;
+ cam_format_t mPictureFormat;
+ int mPostviewWidth;
+ int mPostviewHeight;
+ int mThumbnailWidth;
+ int mThumbnailHeight;
+ cam_format_t mThumbnailFormat;
+ int mJpegOffset;
+ int mSnapshotState;
+ int mNumOfSnapshot;
+ int mNumOfRecievedJPEG;
+ bool mModeLiveSnapshot;
+ bool mBurstModeFlag;
+ int mActualPictureWidth;
+ int mActualPictureHeight;
+ bool mJpegDownscaling;
+ sp<AshmemPool> mJpegHeap;
+ /*TBD:Bikas: This is defined in HWI too.*/
+#ifdef USE_ION
+ sp<IonPool> mDisplayHeap;
+ sp<IonPool> mPostviewHeap;
+#else
+ sp<PmemPool> mDisplayHeap;
+ sp<PmemPool> mPostviewHeap;
+#endif
+ mm_camera_ch_data_buf_t *mCurrentFrameEncoded;
+ mm_cameara_stream_buf_t mSnapshotStreamBuf;
+ mm_cameara_stream_buf_t mPostviewStreamBuf;
+ StreamQueue mSnapshotQueue;
+ static const int mMaxSnapshotBufferCount = 16;
+ int mSnapshotBufferNum;
+ int mMainfd[mMaxSnapshotBufferCount];
+ int mThumbfd[mMaxSnapshotBufferCount];
+ int mMainSize;
+ int mThumbSize;
+ camera_memory_t *mCameraMemoryPtrMain[mMaxSnapshotBufferCount];
+ camera_memory_t *mCameraMemoryPtrThumb[mMaxSnapshotBufferCount];
+ int mJpegSessionId;
+ int dump_fd;
+ bool mFullLiveshot;
+ StreamQueue mWDNQueue; // queue to hold frames while one frame is sent out for WDN
+ bool mIsDoingWDN; // flag to indicate if WDN is going on (one frame is sent out for WDN)
+ bool mDropThumbnail;
+ int mJpegQuality;
+}; // QCameraStream_Snapshot
+
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCamera/HAL/core/src/QCameraHAL.cpp b/camera/QCamera/HAL/core/src/QCameraHAL.cpp
new file mode 100644
index 0000000..0fca676
--- /dev/null
+++ b/camera/QCamera/HAL/core/src/QCameraHAL.cpp
@@ -0,0 +1,133 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCameraHAL"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+
+/* include QCamera Hardware Interface Header*/
+#include "QCameraHAL.h"
+
+int HAL_numOfCameras = 0;
+qcamera_info_t HAL_cameraInfo[MSM_MAX_CAMERA_SENSORS];
+mm_camera_t * HAL_camerahandle[MSM_MAX_CAMERA_SENSORS];
+int HAL_currentCameraMode;
+
+namespace android {
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+extern "C" int HAL_getNumberOfCameras()
+{
+ /* try to query every time we get the call!*/
+ uint8_t num_camera = 0;
+ mm_camera_t * handle_base = 0;
+ LOGV("%s: E", __func__);
+
+ handle_base= mm_camera_query(&num_camera);
+
+ if (!handle_base) {
+ HAL_numOfCameras = 0;
+ }
+ else
+ {
+ qcamera_info_t* p_camera_info = 0;
+ HAL_numOfCameras=num_camera;
+
+ LOGI("Handle base =0x%p",handle_base);
+ LOGI("getCameraInfo: numOfCameras = %d", HAL_numOfCameras);
+ for(int i = 0; i < HAL_numOfCameras; i++) {
+ LOGI("Handle [%d]=0x%p",i,handle_base+i);
+ HAL_camerahandle[i]=handle_base + i;
+ p_camera_info = &(HAL_camerahandle[i]->camera_info);
+ if (p_camera_info) {
+ LOGI("Camera sensor %d info:", i);
+ LOGI("camera_id: %d", p_camera_info->camera_id);
+ LOGI("modes_supported: %x", p_camera_info->modes_supported);
+ LOGI("position: %d", p_camera_info->position);
+ LOGI("sensor_mount_angle: %d", p_camera_info->sensor_mount_angle);
+ }
+ }
+ }
+
+ LOGV("%s: X", __func__);
+
+ return HAL_numOfCameras;
+}
+
+extern "C" int HAL_isIn3DMode()
+{
+ return HAL_currentCameraMode == CAMERA_MODE_3D;
+}
+
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo)
+{
+ mm_camera_t *mm_camer_obj = 0;
+ LOGV("%s: E", __func__);
+
+ if (!HAL_numOfCameras || HAL_numOfCameras < cameraId || !cameraInfo)
+ return;
+ else
+ mm_camer_obj = HAL_camerahandle[cameraId];
+
+ if (!mm_camer_obj)
+ return;
+ else {
+ cameraInfo->facing =
+ (FRONT_CAMERA == mm_camer_obj->camera_info.position)?
+ CAMERA_FACING_FRONT : CAMERA_FACING_BACK;
+
+ cameraInfo->orientation = mm_camer_obj->camera_info.sensor_mount_angle;
+#if 0
+ // TODO: fix me
+        /* We always support ZSL in our stack*/
+ cameraInfo->mode = CAMERA_SUPPORT_MODE_ZSL;
+ if (mm_camer_obj->camera_info.modes_supported & CAMERA_MODE_2D) {
+ cameraInfo->mode |= CAMERA_SUPPORT_MODE_2D;
+ }
+ if (mm_camer_obj->camera_info.modes_supported & CAMERA_MODE_3D) {
+ cameraInfo->mode |= CAMERA_SUPPORT_MODE_3D;
+ }
+#endif
+ }
+ LOGV("%s: X", __func__);
+ return;
+}
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" void * HAL_openCameraHardware(int cameraId, int mode)
+{
+ LOGV("%s: E", __func__);
+ if (!HAL_numOfCameras || HAL_numOfCameras < cameraId ||cameraId < 0) {
+ return NULL;
+ }
+ return QCameraHAL_openCameraHardware(cameraId, mode);
+}
+
+
+}; // namespace android
diff --git a/camera/QCamera/HAL/core/src/QCameraHWI.cpp b/camera/QCamera/HAL/core/src/QCameraHWI.cpp
new file mode 100644
index 0000000..e2af277
--- /dev/null
+++ b/camera/QCamera/HAL/core/src/QCameraHWI.cpp
@@ -0,0 +1,2502 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define LOG_NIDEBUG 0
+
+#define LOG_TAG "QCameraHWI"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+
+/* QCameraHardwareInterface class implementation goes here*/
+/* the following code implements the control logic of this class*/
+
+namespace android {
+static void HAL_event_cb(mm_camera_event_t *evt, void *user_data)
+{
+ QCameraHardwareInterface *obj = (QCameraHardwareInterface *)user_data;
+ if (obj) {
+ obj->processEvent(evt);
+ } else {
+ LOGE("%s: NULL user_data", __func__);
+ }
+}
+
+int32_t QCameraHardwareInterface::createRecord()
+{
+ int32_t ret = MM_CAMERA_OK;
+ LOGV("%s : BEGIN",__func__);
+
+ /*
+ * Creating Instance of record stream.
+ */
+ LOGE("Mymode Record = %d",myMode);
+ mStreamRecord = QCameraStream_record::createInstance(mCameraId,
+ myMode);
+
+ if (!mStreamRecord) {
+ LOGE("%s: error - can't creat record stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ /* Store HAL object in record stream Object */
+ mStreamRecord->setHALCameraControl(this);
+
+ /*Init Channel */
+ ret = mStreamRecord->init();
+ if (MM_CAMERA_OK != ret){
+ LOGE("%s: error - can't init Record channel!", __func__);
+ return BAD_VALUE;
+ }
+ LOGV("%s : END",__func__);
+ return ret;
+}
+
+int32_t QCameraHardwareInterface::createSnapshot()
+{
+ int32_t ret = MM_CAMERA_OK;
+ LOGV("%s : BEGIN",__func__);
+
+ /*
+ * Creating Instance of Snapshot stream.
+ */
+ LOGE("Mymode Snap = %d",myMode);
+ mStreamSnap = QCameraStream_Snapshot::createInstance(mCameraId,
+ myMode);
+ if (!mStreamSnap) {
+ LOGE("%s: error - can't creat snapshot stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ /* Store HAL object in Snapshot stream Object */
+ mStreamSnap->setHALCameraControl(this);
+
+ /*Init Channel */
+ ret = mStreamSnap->init();
+ if (MM_CAMERA_OK != ret){
+ LOGE("%s: error - can't init Snapshot channel!", __func__);
+ return BAD_VALUE;
+ }
+ LOGV("%s : END",__func__);
+ return ret;
+}
+
+int32_t QCameraHardwareInterface::createPreview()
+{
+ int32_t ret = MM_CAMERA_OK;
+ LOGV("%s : BEGIN",__func__);
+
+ LOGE("Mymode Preview = %d",myMode);
+ mStreamDisplay = QCameraStream_preview::createInstance(mCameraId,
+ myMode);
+ if (!mStreamDisplay) {
+ LOGE("%s: error - can't creat preview stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ mStreamDisplay->setHALCameraControl(this);
+
+ /*now init all the buffers and send them to the stream object*/
+ ret = mStreamDisplay->init();
+ if (MM_CAMERA_OK != ret){
+ LOGE("%s: error - can't init Preview channel!", __func__);
+ return BAD_VALUE;
+ }
+ LOGV("%s : END",__func__);
+ return ret;
+}
+
+/* constructor */
+QCameraHardwareInterface::
+QCameraHardwareInterface(int cameraId, int mode)
+ : mCameraId(cameraId),
+ mParameters(),
+ mMsgEnabled(0),
+ mNotifyCb(0),
+ mDataCb(0),
+ mDataCbTimestamp(0),
+ mCallbackCookie(0),
+ //mPreviewHeap(0),
+ mStreamDisplay (NULL), mStreamRecord(NULL), mStreamSnap(NULL),
+ mFps(0),
+ mDebugFps(0),
+ mMaxZoom(0),
+ mCurrentZoom(0),
+ mSupportedPictureSizesCount(15),
+ mDumpFrmCnt(0), mDumpSkipCnt(0),
+ mPictureSizeCount(15),
+ mPreviewSizeCount(13),
+ mVideoSizeCount(0),
+ mAutoFocusRunning(false),
+ mHasAutoFocusSupport(false),
+ mInitialized(false),
+ mIs3DModeOn(0),
+ mSmoothZoomRunning(false),
+ mParamStringInitialized(false),
+ mFaceDetectOn(0),
+ mDisEnabled(0),
+ mZoomSupported(false),
+ mFullLiveshotEnabled(true),
+ mRecordingHint(0),
+ mStatsOn(0), mCurrentHisto(-1), mSendData(false), mStatHeap(NULL),
+ mZslLookBackMode(0),
+ mZslLookBackValue(0),
+ mZslEmptyQueueFlag(FALSE),
+ mPictureSizes(NULL),
+ mVideoSizes(NULL),
+ mCameraState(CAMERA_STATE_UNINITED),
+ mPostPreviewHeap(NULL),
+ mHdrMode(HDR_BRACKETING_OFF),
+ mStreamLiveSnap(NULL),
+ mExifTableNumEntries(0),
+ mDenoiseValue(0),
+ mSnapshotFormat(0),
+ mStartRecording(0),
+ mZslInterval(1),
+ mNoDisplayMode(0),
+ mBrightness(0),
+ mContrast(0),
+ mEffects(0),
+ mBestShotMode(0),
+ mHJR(0),
+ mSkinToneEnhancement(0),
+ mRotation(0),
+ mFocusMode(AF_MODE_MAX),
+ mPreviewFormat(CAMERA_YUV_420_NV21),
+ mRestartPreview(false),
+ mReleasedRecordingFrame(false)
+{
+ LOGI("QCameraHardwareInterface: E");
+ int32_t result = MM_CAMERA_E_GENERAL;
+ char value[PROPERTY_VALUE_MAX];
+
+ pthread_mutex_init(&mAsyncCmdMutex, NULL);
+ pthread_cond_init(&mAsyncCmdWait, NULL);
+
+ property_get("persist.debug.sf.showfps", value, "0");
+ mDebugFps = atoi(value);
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ mPreviewWindow = NULL;
+ property_get("camera.hal.fps", value, "0");
+ mFps = atoi(value);
+
+ LOGI("Init mPreviewState = %d", mPreviewState);
+
+ property_get("persist.camera.hal.multitouchaf", value, "0");
+ mMultiTouch = atoi(value);
+
+ property_get("persist.camera.full.liveshot", value, "1");
+ mFullLiveshotEnabled = atoi(value);
+
+ property_get("persist.camera.hal.dis", value, "0");
+ mDisEnabled = atoi(value);
+
+ /* Open camera stack! */
+ result=cam_ops_open(mCameraId, MM_CAMERA_OP_MODE_NOTUSED);
+ if (result == MM_CAMERA_OK) {
+ int i;
+ mm_camera_event_type_t evt;
+ for (i = 0; i < MM_CAMERA_EVT_TYPE_MAX; i++) {
+ evt = (mm_camera_event_type_t) i;
+ if (cam_evt_is_event_supported(mCameraId, evt)){
+ cam_evt_register_event_notify(mCameraId,
+ HAL_event_cb, (void *)this, evt);
+ }
+ }
+ }
+ LOGV("Cam open returned %d",result);
+ if(MM_CAMERA_OK != result) {
+ LOGE("startCamera: cam_ops_open failed: id = %d", mCameraId);
+ return;
+ }
+
+ loadTables();
+ /* Setup Picture Size and Preview size tables */
+ setPictureSizeTable();
+ LOGD("%s: Picture table size: %d", __func__, mPictureSizeCount);
+ LOGD("%s: Picture table: ", __func__);
+ for(unsigned int i=0; i < mPictureSizeCount;i++) {
+ LOGD(" %d %d", mPictureSizes[i].width, mPictureSizes[i].height);
+ }
+
+ setPreviewSizeTable();
+ LOGD("%s: Preview table size: %d", __func__, mPreviewSizeCount);
+ LOGD("%s: Preview table: ", __func__);
+ for(unsigned int i=0; i < mPreviewSizeCount;i++) {
+ LOGD(" %d %d", mPreviewSizes[i].width, mPreviewSizes[i].height);
+ }
+
+ setVideoSizeTable();
+ LOGD("%s: Video table size: %d", __func__, mVideoSizeCount);
+ LOGD("%s: Video table: ", __func__);
+ for(unsigned int i=0; i < mVideoSizeCount;i++) {
+ LOGD(" %d %d", mVideoSizes[i].width, mVideoSizes[i].height);
+ }
+
+ /* set my mode - update myMode member variable due to difference in
+ enum definition between upper and lower layer*/
+ setMyMode(mode);
+ initDefaultParameters();
+
+ //Create Stream Objects
+ //Preview
+ result = createPreview();
+ if(result != MM_CAMERA_OK) {
+ LOGE("%s X: Failed to create Preview Object",__func__);
+ return;
+ }
+
+ //Record
+ result = createRecord();
+ if(result != MM_CAMERA_OK) {
+ LOGE("%s X: Failed to create Record Object",__func__);
+ return;
+ }
+
+ //Snapshot
+ result = createSnapshot();
+ if(result != MM_CAMERA_OK) {
+ LOGE("%s X: Failed to create Record Object",__func__);
+ return;
+ }
+ mCameraState = CAMERA_STATE_READY;
+
+ LOGI("QCameraHardwareInterface: X");
+}
+
+QCameraHardwareInterface::~QCameraHardwareInterface()
+{
+ LOGI("~QCameraHardwareInterface: E");
+ int result;
+
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ stopPreview();
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ stopRecordingInternal();
+ stopPreview();
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ cancelPictureInternal();
+ break;
+ default:
+ break;
+ }
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+
+ freePictureTable();
+ freeVideoSizeTable();
+ if(mStatHeap != NULL) {
+ mStatHeap.clear( );
+ mStatHeap = NULL;
+ }
+ /* Join the threads, complete operations and then delete
+ the instances. */
+ cam_ops_close(mCameraId);
+ if(mStreamDisplay){
+ QCameraStream_preview::deleteInstance (mStreamDisplay);
+ mStreamDisplay = NULL;
+ }
+ if(mStreamRecord) {
+ QCameraStream_record::deleteInstance (mStreamRecord);
+ mStreamRecord = NULL;
+ }
+ if(mStreamSnap) {
+ QCameraStream_Snapshot::deleteInstance (mStreamSnap);
+ mStreamSnap = NULL;
+ }
+
+ if (mStreamLiveSnap){
+ QCameraStream_Snapshot::deleteInstance (mStreamLiveSnap);
+ mStreamLiveSnap = NULL;
+ }
+
+ pthread_mutex_destroy(&mAsyncCmdMutex);
+ pthread_cond_destroy(&mAsyncCmdWait);
+
+ LOGI("~QCameraHardwareInterface: X");
+}
+
+bool QCameraHardwareInterface::isCameraReady()
+{
+ LOGE("isCameraReady mCameraState %d", mCameraState);
+ return (mCameraState == CAMERA_STATE_READY);
+}
+
+void QCameraHardwareInterface::release()
+{
+ LOGI("release: E");
+ Mutex::Autolock l(&mLock);
+
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ stopPreviewInternal();
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ stopRecordingInternal();
+ stopPreviewInternal();
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ cancelPictureInternal();
+ break;
+ default:
+ break;
+ }
+#if 0
+ if (isRecordingRunning()) {
+ stopRecordingInternal();
+ LOGI("release: stopRecordingInternal done.");
+ }
+ if (isPreviewRunning()) {
+ stopPreview(); //stopPreviewInternal();
+ LOGI("release: stopPreviewInternal done.");
+ }
+ if (isSnapshotRunning()) {
+ cancelPictureInternal();
+ LOGI("release: cancelPictureInternal done.");
+ }
+ if (mCameraState == CAMERA_STATE_ERROR) {
+ //TBD: If Error occurs then tear down
+ LOGI("release: Tear down.");
+ }
+#endif
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ LOGI("release: X");
+}
+
+void QCameraHardwareInterface::setCallbacks(
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ LOGE("setCallbacks: E");
+ Mutex::Autolock lock(mLock);
+ mNotifyCb = notify_cb;
+ mDataCb = data_cb;
+ mDataCbTimestamp = data_cb_timestamp;
+ mGetMemory = get_memory;
+ mCallbackCookie = user;
+ LOGI("setCallbacks: X");
+}
+
+void QCameraHardwareInterface::enableMsgType(int32_t msgType)
+{
+ LOGI("enableMsgType: E, msgType =0x%x", msgType);
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled |= msgType;
+ LOGI("enableMsgType: X, msgType =0x%x, mMsgEnabled=0x%x", msgType, mMsgEnabled);
+}
+
+void QCameraHardwareInterface::disableMsgType(int32_t msgType)
+{
+ LOGI("disableMsgType: E");
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled &= ~msgType;
+ LOGI("disableMsgType: X, msgType =0x%x, mMsgEnabled=0x%x", msgType, mMsgEnabled);
+}
+
+int QCameraHardwareInterface::msgTypeEnabled(int32_t msgType)
+{
+ LOGI("msgTypeEnabled: E");
+ Mutex::Autolock lock(mLock);
+ return (mMsgEnabled & msgType);
+ LOGI("msgTypeEnabled: X");
+}
+#if 0
+status_t QCameraHardwareInterface::dump(int fd, const Vector<String16>& args) const
+{
+ LOGI("dump: E");
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ String8 result;
+ AutoMutex lock(&mLock);
+ write(fd, result.string(), result.size());
+ LOGI("dump: E");
+ return NO_ERROR;
+}
+#endif
+
+int QCameraHardwareInterface::dump(int fd)
+{
+ LOGE("%s: not supported yet", __func__);
+ return -1;
+}
+
+status_t QCameraHardwareInterface::sendCommand(int32_t command, int32_t arg1,
+ int32_t arg2)
+{
+ LOGI("sendCommand: E");
+ status_t rc = NO_ERROR;
+ Mutex::Autolock l(&mLock);
+
+ switch (command) {
+ case CAMERA_CMD_HISTOGRAM_ON:
+ LOGE("histogram set to on");
+ rc = setHistogram(1);
+ break;
+ case CAMERA_CMD_HISTOGRAM_OFF:
+ LOGE("histogram set to off");
+ rc = setHistogram(0);
+ break;
+ case CAMERA_CMD_HISTOGRAM_SEND_DATA:
+ LOGE("histogram send data");
+ mSendData = true;
+ rc = NO_ERROR;
+ break;
+ case CAMERA_CMD_START_FACE_DETECTION:
+ if(supportsFaceDetection() == false){
+ LOGE("Face detection support is not available");
+ return NO_ERROR;
+ }
+ setFaceDetection("on");
+ return runFaceDetection();
+ case CAMERA_CMD_STOP_FACE_DETECTION:
+ if(supportsFaceDetection() == false){
+ LOGE("Face detection support is not available");
+ return NO_ERROR;
+ }
+ setFaceDetection("off");
+ return runFaceDetection();
+#if 0
+ case CAMERA_CMD_SEND_META_DATA:
+ mMetaDataWaitLock.lock();
+ if(mFaceDetectOn == true) {
+ mSendMetaData = true;
+ }
+ mMetaDataWaitLock.unlock();
+ return NO_ERROR;
+#endif
+#if 0 /* To Do: will enable it later */
+ case CAMERA_CMD_START_SMOOTH_ZOOM :
+ LOGV("HAL sendcmd start smooth zoom %d %d", arg1 , arg2);
+ /*TO DO: get MaxZoom from parameter*/
+ int MaxZoom = 100;
+
+ switch(mCameraState ) {
+ case CAMERA_STATE_PREVIEW:
+ case CAMERA_STATE_RECORD_CMD_SENT:
+ case CAMERA_STATE_RECORD:
+ mTargetSmoothZoom = arg1;
+ mCurrentZoom = mParameters.getInt("zoom");
+ mSmoothZoomStep = (mCurrentZoom > mTargetSmoothZoom)? -1: 1;
+ if(mCurrentZoom == mTargetSmoothZoom) {
+ LOGV("Smoothzoom target zoom value is same as "
+ "current zoom value, return...");
+ mNotifyCallback(CAMERA_MSG_ZOOM,
+ mCurrentZoom, 1, mCallbackCookie);
+ } else if(mCurrentZoom < 0 || mCurrentZoom > MaxZoom ||
+ mTargetSmoothZoom < 0 || mTargetSmoothZoom > MaxZoom) {
+ LOGE(" ERROR : beyond supported zoom values, break..");
+ mNotifyCallback(CAMERA_MSG_ZOOM,
+ mCurrentZoom, 0, mCallbackCookie);
+ } else {
+ mSmoothZoomRunning = true;
+ mCurrentZoom += mSmoothZoomStep;
+ if ((mSmoothZoomStep < 0 && mCurrentZoom < mTargetSmoothZoom)||
+ (mSmoothZoomStep > 0 && mCurrentZoom > mTargetSmoothZoom )) {
+ mCurrentZoom = mTargetSmoothZoom;
+ }
+ mParameters.set("zoom", mCurrentZoom);
+ setZoom(mParameters);
+ }
+ break;
+ default:
+ LOGV(" No preview, no smoothzoom ");
+ break;
+ }
+ rc = NO_ERROR;
+ break;
+
+ case CAMERA_CMD_STOP_SMOOTH_ZOOM:
+ if(mSmoothZoomRunning) {
+ mSmoothZoomRunning = false;
+ /*To Do: send cmd to stop zooming*/
+ }
+ LOGV("HAL sendcmd stop smooth zoom");
+ rc = NO_ERROR;
+ break;
+#endif
+ default:
+ break;
+ }
+ LOGI("sendCommand: X");
+ return rc;
+}
+
+void QCameraHardwareInterface::setMyMode(int mode)
+{
+ LOGI("setMyMode: E");
+ if (mode & CAMERA_SUPPORT_MODE_3D) {
+ myMode = CAMERA_MODE_3D;
+ }else {
+ /* default mode is 2D */
+ myMode = CAMERA_MODE_2D;
+ }
+
+ if (mode & CAMERA_SUPPORT_MODE_ZSL) {
+ myMode = (camera_mode_t)(myMode |CAMERA_ZSL_MODE);
+ }else {
+ myMode = (camera_mode_t) (myMode | CAMERA_NONZSL_MODE);
+ }
+ LOGI("setMyMode: Set mode to %d (passed mode: %d)", myMode, mode);
+ LOGI("setMyMode: X");
+}
+/* static factory function */
+QCameraHardwareInterface *QCameraHardwareInterface::createInstance(int cameraId, int mode)
+{
+ LOGI("createInstance: E");
+ QCameraHardwareInterface *cam = new QCameraHardwareInterface(cameraId, mode);
+ if (cam ) {
+ if (cam->mCameraState != CAMERA_STATE_READY) {
+ LOGE("createInstance: Failed");
+ delete cam;
+ cam = NULL;
+ }
+ }
+
+ if (cam) {
+ //sp<CameraHardwareInterface> hardware(cam);
+ LOGI("createInstance: X");
+ return cam;
+ } else {
+ return NULL;
+ }
+}
+/* external plug in function */
+extern "C" void *
+QCameraHAL_openCameraHardware(int cameraId, int mode)
+{
+ LOGI("QCameraHAL_openCameraHardware: E");
+ return (void *) QCameraHardwareInterface::createInstance(cameraId, mode);
+}
+
+#if 0
+bool QCameraHardwareInterface::useOverlay(void)
+{
+ LOGI("useOverlay: E");
+ mUseOverlay = TRUE;
+ LOGI("useOverlay: X");
+ return mUseOverlay;
+}
+#endif
+
+bool QCameraHardwareInterface::isPreviewRunning() {
+ LOGI("isPreviewRunning: E");
+ bool ret = false;
+ LOGI("isPreviewRunning: camera state:%d", mCameraState);
+
+ if((mCameraState == CAMERA_STATE_PREVIEW) ||
+ (mCameraState == CAMERA_STATE_PREVIEW_START_CMD_SENT) ||
+ (mCameraState == CAMERA_STATE_RECORD) ||
+ (mCameraState == CAMERA_STATE_RECORD_START_CMD_SENT) ||
+ (mCameraState == CAMERA_STATE_ZSL) ||
+ (mCameraState == CAMERA_STATE_ZSL_START_CMD_SENT)){
+ return true;
+ }
+ LOGI("isPreviewRunning: X");
+ return ret;
+}
+
+bool QCameraHardwareInterface::isRecordingRunning() {
+ LOGE("isRecordingRunning: E");
+ bool ret = false;
+ if(QCAMERA_HAL_RECORDING_STARTED == mPreviewState)
+ ret = true;
+ //if((mCameraState == CAMERA_STATE_RECORD) ||
+ // (mCameraState == CAMERA_STATE_RECORD_START_CMD_SENT)) {
+ // return true;
+ //}
+ LOGE("isRecordingRunning: X");
+ return ret;
+}
+
+bool QCameraHardwareInterface::isSnapshotRunning() {
+ LOGE("isSnapshotRunning: E");
+ bool ret = false;
+ //if((mCameraState == CAMERA_STATE_SNAP_CMD_ACKED) ||
+ // (mCameraState == CAMERA_STATE_SNAP_START_CMD_SENT)) {
+ // return true;
+ //}
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ case QCAMERA_HAL_RECORDING_STARTED:
+ default:
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ ret = true;
+ break;
+ }
+ LOGI("isSnapshotRunning: X");
+ return ret;
+}
+
+bool QCameraHardwareInterface::isZSLMode() {
+ return (myMode & CAMERA_ZSL_MODE);
+}
+
+int QCameraHardwareInterface::getHDRMode() {
+ return mHdrMode;
+}
+
+void QCameraHardwareInterface::debugShowPreviewFPS() const
+{
+ static int mFrameCount;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ if (diff > ms2ns(250)) {
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ LOGI("Preview Frames Per Second: %.4f", mFps);
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ }
+}
+
+void QCameraHardwareInterface::
+processPreviewChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+ LOGI("processPreviewChannelEvent: E");
+ switch(channelEvent) {
+ case MM_CAMERA_CH_EVT_STREAMING_ON:
+ mCameraState =
+ isZSLMode() ? CAMERA_STATE_ZSL : CAMERA_STATE_PREVIEW;
+ break;
+ case MM_CAMERA_CH_EVT_STREAMING_OFF:
+ mCameraState = CAMERA_STATE_READY;
+ break;
+ case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+ break;
+ default:
+ break;
+ }
+ LOGI("processPreviewChannelEvent: X");
+ return;
+}
+
+void QCameraHardwareInterface::processRecordChannelEvent(
+ mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+ LOGI("processRecordChannelEvent: E");
+ switch(channelEvent) {
+ case MM_CAMERA_CH_EVT_STREAMING_ON:
+ mCameraState = CAMERA_STATE_RECORD;
+ break;
+ case MM_CAMERA_CH_EVT_STREAMING_OFF:
+ mCameraState = CAMERA_STATE_PREVIEW;
+ break;
+ case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+ break;
+ default:
+ break;
+ }
+ LOGI("processRecordChannelEvent: X");
+ return;
+}
+
+void QCameraHardwareInterface::
+processSnapshotChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+ LOGI("processSnapshotChannelEvent: E evt=%d state=%d", channelEvent,
+ mCameraState);
+ switch(channelEvent) {
+ case MM_CAMERA_CH_EVT_STREAMING_ON:
+ if (!mFullLiveshotEnabled) {
+ mCameraState =
+ isZSLMode() ? CAMERA_STATE_ZSL : CAMERA_STATE_SNAP_CMD_ACKED;
+ }
+ break;
+ case MM_CAMERA_CH_EVT_STREAMING_OFF:
+ if (!mFullLiveshotEnabled) {
+ mCameraState = CAMERA_STATE_READY;
+ }
+ break;
+ case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+ break;
+ case MM_CAMERA_CH_EVT_DATA_REQUEST_MORE:
+ if (isZSLMode()) {
+ /* ZSL Mode: In ZSL Burst Mode, users may request a number of
+ snapshots larger than the internal size of the ZSL queue, so we
+ need to process the remaining frames as they become available.
+ In such a case, we'll get this event */
+ if(NULL != mStreamSnap)
+ mStreamSnap->takePictureZSL();
+ }
+ break;
+ default:
+ break;
+ }
+ LOGI("processSnapshotChannelEvent: X");
+ return;
+}
+
+void QCameraHardwareInterface::processChannelEvent(
+ mm_camera_ch_event_t *event, app_notify_cb_t *app_cb)
+{
+ LOGI("processChannelEvent: E");
+ Mutex::Autolock lock(mLock);
+ switch(event->ch) {
+ case MM_CAMERA_CH_PREVIEW:
+ processPreviewChannelEvent(event->evt, app_cb);
+ break;
+ case MM_CAMERA_CH_VIDEO:
+ processRecordChannelEvent(event->evt, app_cb);
+ break;
+ case MM_CAMERA_CH_SNAPSHOT:
+ processSnapshotChannelEvent(event->evt, app_cb);
+ break;
+ default:
+ break;
+ }
+ LOGI("processChannelEvent: X");
+ return;
+}
+
+void QCameraHardwareInterface::processCtrlEvent(mm_camera_ctrl_event_t *event, app_notify_cb_t *app_cb)
+{
+ LOGI("processCtrlEvent: %d, E",event->evt);
+ Mutex::Autolock lock(mLock);
+ switch(event->evt)
+ {
+ case MM_CAMERA_CTRL_EVT_ZOOM_DONE:
+ zoomEvent(&event->status, app_cb);
+ break;
+ case MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE:
+ autoFocusEvent(&event->status, app_cb);
+ break;
+ case MM_CAMERA_CTRL_EVT_PREP_SNAPSHOT:
+ break;
+ case MM_CAMERA_CTRL_EVT_WDN_DONE:
+ wdenoiseEvent(event->status, (void *)(event->cookie));
+ break;
+ case MM_CAMERA_CTRL_EVT_ERROR:
+ app_cb->notifyCb = mNotifyCb;
+ app_cb->argm_notify.msg_type = CAMERA_MSG_ERROR;
+ app_cb->argm_notify.ext1 = CAMERA_ERROR_UNKNOWN;
+ app_cb->argm_notify.cookie = mCallbackCookie;
+ break;
+ default:
+ break;
+ }
+ LOGI("processCtrlEvent: X");
+ return;
+}
+
+void QCameraHardwareInterface::processStatsEvent(
+ mm_camera_stats_event_t *event, app_notify_cb_t *app_cb)
+{
+ LOGI("processStatsEvent: E");
+ if (!isPreviewRunning( )) {
+ LOGE("preview is not running");
+ return;
+ }
+
+ switch (event->event_id) {
+ case MM_CAMERA_STATS_EVT_HISTO:
+ {
+ LOGE("HAL process Histo: mMsgEnabled=0x%x, mStatsOn=%d, mSendData=%d, mDataCb=%p ",
+ (mMsgEnabled & CAMERA_MSG_STATS_DATA), mStatsOn, mSendData, mDataCb);
+ int msgEnabled = mMsgEnabled;
+ /*get stats buffer based on index*/
+ camera_preview_histogram_info* hist_info =
+ (camera_preview_histogram_info*) mHistServer.camera_memory[event->e.stats_histo.index]->data;
+
+ if(mStatsOn == QCAMERA_PARM_ENABLE && mSendData &&
+ mDataCb && (msgEnabled & CAMERA_MSG_STATS_DATA) ) {
+ uint32_t *dest;
+ mSendData = false;
+ mCurrentHisto = (mCurrentHisto + 1) % 3;
+ // The first element of the array will contain the maximum hist value provided by driver.
+ *(uint32_t *)((unsigned int)(mStatsMapped[mCurrentHisto]->data)) = hist_info->max_value;
+ memcpy((uint32_t *)((unsigned int)mStatsMapped[mCurrentHisto]->data + sizeof(int32_t)),
+ (uint32_t *)hist_info->buffer,(sizeof(int32_t) * 256));
+
+ app_cb->dataCb = mDataCb;
+ app_cb->argm_data_cb.msg_type = CAMERA_MSG_STATS_DATA;
+ app_cb->argm_data_cb.data = mStatsMapped[mCurrentHisto];
+ app_cb->argm_data_cb.index = 0;
+ app_cb->argm_data_cb.metadata = NULL;
+ app_cb->argm_data_cb.cookie = mCallbackCookie;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+ LOGV("receiveCameraStats X");
+}
+
+void QCameraHardwareInterface::processInfoEvent(
+ mm_camera_info_event_t *event, app_notify_cb_t *app_cb) {
+ LOGI("processInfoEvent: %d, E",event->event_id);
+ //Mutex::Autolock lock(eventLock);
+ switch(event->event_id)
+ {
+ case MM_CAMERA_INFO_EVT_ROI:
+ roiEvent(event->e.roi, app_cb);
+ break;
+ default:
+ break;
+ }
+ LOGI("processInfoEvent: X");
+ return;
+}
+
+void QCameraHardwareInterface::processEvent(mm_camera_event_t *event)
+{
+ app_notify_cb_t app_cb;
+ LOGE("processEvent: type :%d E",event->event_type);
+ if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED){
+ LOGE("Stop recording issued. Return from process Event");
+ return;
+ }
+ memset(&app_cb, 0, sizeof(app_notify_cb_t));
+ switch(event->event_type)
+ {
+ case MM_CAMERA_EVT_TYPE_CH:
+ processChannelEvent(&event->e.ch, &app_cb);
+ break;
+ case MM_CAMERA_EVT_TYPE_CTRL:
+ processCtrlEvent(&event->e.ctrl, &app_cb);
+ break;
+ case MM_CAMERA_EVT_TYPE_STATS:
+ processStatsEvent(&event->e.stats, &app_cb);
+ break;
+ case MM_CAMERA_EVT_TYPE_INFO:
+ processInfoEvent(&event->e.info, &app_cb);
+ break;
+ default:
+ break;
+ }
+ LOGE(" App_cb Notify %p, datacb=%p", app_cb.notifyCb, app_cb.dataCb);
+ if (app_cb.notifyCb) {
+ app_cb.notifyCb(app_cb.argm_notify.msg_type,
+ app_cb.argm_notify.ext1, app_cb.argm_notify.ext2,
+ app_cb.argm_notify.cookie);
+ }
+ if (app_cb.dataCb) {
+ app_cb.dataCb(app_cb.argm_data_cb.msg_type,
+ app_cb.argm_data_cb.data, app_cb.argm_data_cb.index,
+ app_cb.argm_data_cb.metadata, app_cb.argm_data_cb.cookie);
+ }
+ LOGI("processEvent: X");
+ return;
+}
+
+bool QCameraHardwareInterface::preview_parm_config (cam_ctrl_dimension_t* dim,
+ CameraParameters& parm)
+{
+ LOGI("preview_parm_config: E");
+ bool matching = true;
+ int display_width = 0; /* width of display */
+ int display_height = 0; /* height of display */
+ uint16_t video_width = 0; /* width of the video */
+ uint16_t video_height = 0; /* height of the video */
+
+ /* First check if the preview resolution is the same, if not, change it*/
+ parm.getPreviewSize(&display_width, &display_height);
+ if (display_width && display_height) {
+ matching = (display_width == dim->display_width) &&
+ (display_height == dim->display_height);
+
+ if (!matching) {
+ dim->display_width = display_width;
+ dim->display_height = display_height;
+ }
+ }
+ else
+ matching = false;
+
+ cam_format_t value = getPreviewFormat();
+
+ if(value != NOT_FOUND && value != dim->prev_format ) {
+ //Setting to Parameter requested by the Upper layer
+ dim->prev_format = value;
+ }else{
+ //Setting to default Format.
+ dim->prev_format = CAMERA_YUV_420_NV21;
+ }
+ mPreviewFormat = dim->prev_format;
+
+ dim->prev_padding_format = getPreviewPadding( );
+
+ dim->enc_format = CAMERA_YUV_420_NV12;
+ dim->orig_video_width = mDimension.orig_video_width;
+ dim->orig_video_height = mDimension.orig_video_height;
+ dim->video_width = mDimension.video_width;
+ dim->video_height = mDimension.video_height;
+ dim->video_chroma_width = mDimension.video_width;
+ dim->video_chroma_height = mDimension.video_height;
+ /* Reset the Main image and thumbnail formats here,
+ * since they might have been changed when video size
+ * livesnapshot was taken. */
+ if (mSnapshotFormat == 1)
+ dim->main_img_format = CAMERA_YUV_422_NV61;
+ else
+ dim->main_img_format = CAMERA_YUV_420_NV21;
+ dim->thumb_format = CAMERA_YUV_420_NV21;
+ LOGI("preview_parm_config: X");
+ return matching;
+}
+
+status_t QCameraHardwareInterface::startPreview()
+{
+ status_t retVal = NO_ERROR;
+
+ LOGE("%s: mPreviewState =%d", __func__, mPreviewState);
+ Mutex::Autolock lock(mLock);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_TAKE_PICTURE:
+ mPreviewState = QCAMERA_HAL_PREVIEW_START;
+ LOGE("%s: HAL::startPreview begin", __func__);
+
+ if(QCAMERA_HAL_PREVIEW_START == mPreviewState &&
+ (mPreviewWindow || isNoDisplayMode())) {
+ LOGE("%s: start preview now", __func__);
+ retVal = startPreview2();
+ if(retVal == NO_ERROR)
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ } else {
+ LOGE("%s: received startPreview, but preview window = null", __func__);
+ }
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ LOGE("%s: cannot start preview in recording state", __func__);
+ break;
+ default:
+ LOGE("%s: unknow state %d received", __func__, mPreviewState);
+ retVal = UNKNOWN_ERROR;
+ break;
+ }
+ return retVal;
+}
+
+status_t QCameraHardwareInterface::startPreview2()
+{
+ LOGI("startPreview2: E");
+ status_t ret = NO_ERROR;
+
+ cam_ctrl_dimension_t dim;
+ mm_camera_dimension_t maxDim;
+ bool initPreview = false;
+
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED) { //isPreviewRunning()){
+ LOGE("%s:Preview already started mCameraState = %d!", __func__, mCameraState);
+ LOGE("%s: X", __func__);
+ return NO_ERROR;
+ }
+
+ /* get existing preview information by querying mm_camera*/
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+
+ if (MM_CAMERA_OK != ret) {
+ LOGE("%s: error - can't get preview dimension!", __func__);
+ LOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }
+
+ /* configure the parameters and see if we need to re-init the stream*/
+ initPreview = preview_parm_config (&dim, mParameters);
+
+ if (mRecordingHint && mFullLiveshotEnabled) {
+#if 0
+ /* Camcorder mode and Full resolution liveshot enabled
+ * TBD lookup table for correct aspect ratio matching size */
+ memset(&maxDim, 0, sizeof(mm_camera_dimension_t));
+ getMaxPictureDimension(&maxDim);
+ if (!maxDim.width || !maxDim.height) {
+ maxDim.width = DEFAULT_LIVESHOT_WIDTH;
+ maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+ }
+ /* TODO Remove this hack after adding code to get live shot dimension */
+ if (!mCameraId) {
+ maxDim.width = DEFAULT_LIVESHOT_WIDTH;
+ maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+ }
+ dim.picture_width = maxDim.width;
+ dim.picture_height = maxDim.height;
+ mParameters.setPictureSize(dim.picture_width, dim.picture_height);
+ LOGI("%s Setting Liveshot dimension as %d x %d", __func__,
+ maxDim.width, maxDim.height);
+#endif
+ int mPictureWidth, mPictureHeight;
+ bool matching;
+ /* First check if the picture resolution is the same, if not, change it*/
+ getPictureSize(&mPictureWidth, &mPictureHeight);
+
+ matching = (mPictureWidth == dim.picture_width) &&
+ (mPictureHeight == dim.picture_height);
+
+ if (!matching) {
+ dim.picture_width = mPictureWidth;
+ dim.picture_height = mPictureHeight;
+ dim.ui_thumbnail_height = dim.display_height;
+ dim.ui_thumbnail_width = dim.display_width;
+ }
+ LOGE("%s: Fullsize Liveshaot Picture size to set: %d x %d", __func__,
+ dim.picture_width, dim.picture_height);
+ mParameters.setPictureSize(dim.picture_width, dim.picture_height);
+ }
+
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+ if (MM_CAMERA_OK != ret) {
+ LOGE("%s X: error - can't config preview parms!", __func__);
+ return BAD_VALUE;
+ }
+
+ mStreamDisplay->setMode(myMode & CAMERA_ZSL_MODE);
+ mStreamSnap->setMode(myMode & CAMERA_ZSL_MODE);
+ mStreamRecord->setMode(myMode & CAMERA_ZSL_MODE);
+ LOGE("%s: myMode = %d", __func__, myMode);
+
+ LOGE("%s: setPreviewWindow", __func__);
+ mStreamDisplay->setPreviewWindow(mPreviewWindow);
+
+ if(isZSLMode()) {
+ /* Start preview streaming */
+ ret = mStreamDisplay->start();
+ if (MM_CAMERA_OK != ret){
+ LOGE("%s: X -error - can't start nonZSL stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ /* Start ZSL stream */
+ ret = mStreamSnap->start();
+ if (MM_CAMERA_OK != ret){
+ LOGE("%s: error - can't start Snapshot stream!", __func__);
+ mStreamDisplay->stop();
+ return BAD_VALUE;
+ }
+ }else{
+ ret = mStreamDisplay->start();
+ }
+
+ /*call QCameraStream_noneZSL::start() */
+ if (MM_CAMERA_OK != ret){
+ LOGE("%s: X error - can't start stream!", __func__);
+ return BAD_VALUE;
+ }
+ if(MM_CAMERA_OK == ret)
+ mCameraState = CAMERA_STATE_PREVIEW_START_CMD_SENT;
+ else
+ mCameraState = CAMERA_STATE_ERROR;
+
+ if(mPostPreviewHeap != NULL) {
+ mPostPreviewHeap.clear();
+ mPostPreviewHeap = NULL;
+ }
+
+ LOGI("startPreview: X");
+ return ret;
+}
+
+// Public stopPreview entry point (called by camera service). Resets the
+// recording-hint to the app-supplied value, then tears preview/recording
+// down according to the current HAL preview state machine.
+void QCameraHardwareInterface::stopPreview()
+{
+ LOGI("%s: stopPreview: E", __func__);
+ Mutex::Autolock lock(mLock);
+ mm_camera_util_profile("HAL: stopPreview(): E");
+ mFaceDetectOn = false;
+
+ // reset recording hint to the value passed from Apps
+ const char * str = mParameters.get(CameraParameters::KEY_RECORDING_HINT);
+ if((str != NULL) && !strcmp(str, "true")){
+ mRecordingHint = TRUE;
+ } else {
+ mRecordingHint = FALSE;
+ }
+
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_START:
+ // Preview was requested but streams never started (no window yet):
+ // just flip the state, nothing to stop.
+ //mPreviewWindow = NULL;
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ // Recording implies preview; stop both, in that order.
+ stopRecordingInternal();
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ default:
+ // NOTE(review): stopPreview during TAKE_PICTURE is silently ignored.
+ break;
+ }
+ LOGI("stopPreview: X, mPreviewState = %d", mPreviewState);
+}
+
+#if 0 //mzhu
+// Dead code (compiled out): former ZSL-specific preview stop path; only
+// validates the stream objects and logs. Kept for reference by "mzhu".
+void QCameraHardwareInterface::stopPreviewZSL()
+{
+ LOGI("stopPreviewZSL: E");
+
+ if(!mStreamDisplay || !mStreamSnap) {
+ LOGE("mStreamDisplay/mStreamSnap is null");
+ return;
+ }
+ LOGI("stopPreview: X, mPreviewState = %d", mPreviewState);
+}
+#endif
+// Internal helper: stops the display stream (and, in ZSL mode, the snapshot
+// stream first), then marks the camera state as "preview stop sent".
+// Caller is expected to hold mLock and to update mPreviewState itself.
+void QCameraHardwareInterface::stopPreviewInternal()
+{
+ LOGI("stopPreviewInternal: E");
+ status_t ret = NO_ERROR;
+
+ if(!mStreamDisplay) {
+ LOGE("mStreamDisplay is null");
+ return;
+ }
+
+ if(isZSLMode()) {
+ /* take care snapshot object for ZSL mode */
+ // NOTE(review): mStreamSnap is dereferenced without a null check here,
+ // unlike mStreamDisplay above — confirm it is always non-null in ZSL.
+ mStreamSnap->stop();
+ }
+ mStreamDisplay->stop();
+
+ mCameraState = CAMERA_STATE_PREVIEW_STOP_CMD_SENT;
+ LOGI("stopPreviewInternal: X");
+}
+
+// Returns whether preview is (or is about to be) running, derived purely
+// from mPreviewState. Recording counts as preview-enabled.
+int QCameraHardwareInterface::previewEnabled()
+{
+ LOGI("previewEnabled: E");
+ Mutex::Autolock lock(mLock);
+ LOGE("%s: mCameraState = %d", __func__, mCameraState);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_TAKE_PICTURE:
+ default:
+ return false;
+ break; // NOTE(review): unreachable after return
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ case QCAMERA_HAL_RECORDING_STARTED:
+ return true;
+ break; // NOTE(review): unreachable after return
+ }
+ return false;
+}
+
+// Starts video recording. Requires preview to be running; if the recording
+// hint was false, preview is restarted with the hint set to TRUE first
+// (stream configuration differs between the two modes).
+status_t QCameraHardwareInterface::startRecording()
+{
+ LOGI("startRecording: E");
+ status_t ret = NO_ERROR;
+ Mutex::Autolock lock(mLock);
+
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ LOGE("%s: preview has not been started", __func__);
+ ret = UNKNOWN_ERROR;
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ LOGE("%s: no preview native window", __func__);
+ ret = UNKNOWN_ERROR;
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ if (mRecordingHint == FALSE) {
+ // Preview was configured without the recording hint; bounce it so
+ // the streams are re-created in recording configuration.
+ LOGE("%s: start recording when hint is false, stop preview first", __func__);
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+
+ // Set recording hint to TRUE
+ mRecordingHint = TRUE;
+ setRecordingHintValue(mRecordingHint);
+
+ // start preview again
+ mPreviewState = QCAMERA_HAL_PREVIEW_START;
+ if (startPreview2() == NO_ERROR)
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ // NOTE(review): a startPreview2() failure is not propagated; we fall
+ // through and still attempt to start the record stream.
+ }
+ // NOTE(review): mm-camera return code is stored into a status_t and
+ // compared against MM_CAMERA_OK — the two error domains are mixed here.
+ ret = mStreamRecord->start();
+ if (MM_CAMERA_OK != ret){
+ LOGE("%s: error - mStreamRecord->start!", __func__);
+ ret = BAD_VALUE;
+ break;
+ }
+ if(MM_CAMERA_OK == ret)
+ mCameraState = CAMERA_STATE_RECORD_START_CMD_SENT;
+ else
+ mCameraState = CAMERA_STATE_ERROR; // NOTE(review): unreachable (break above)
+ mPreviewState = QCAMERA_HAL_RECORDING_STARTED;
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ // Already recording: no-op.
+ LOGE("%s: ", __func__);
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ default:
+ ret = BAD_VALUE;
+ break;
+ }
+ LOGI("startRecording: X");
+ return ret;
+}
+
+// Stops video recording and returns to preview; a no-op in every state
+// other than RECORDING_STARTED.
+void QCameraHardwareInterface::stopRecording()
+{
+ LOGI("stopRecording: E");
+ Mutex::Autolock lock(mLock);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ stopRecordingInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ default:
+ break;
+ }
+ LOGI("stopRecording: X");
+
+}
+// Internal helper: stops the record stream and forces the state machine
+// back to "preview" so a second recording session can start cleanly.
+void QCameraHardwareInterface::stopRecordingInternal()
+{
+ LOGI("stopRecordingInternal: E");
+ status_t ret = NO_ERROR; // NOTE(review): unused
+
+ if(!mStreamRecord) {
+ LOGE("mStreamRecord is null");
+ return;
+ }
+
+ /*
+ * call QCameraStream_record::stop()
+ * Unregister Callback, action stop
+ */
+ mStreamRecord->stop();
+ mCameraState = CAMERA_STATE_PREVIEW; //TODO : Apurva : Hacked for 2nd time Recording
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ LOGI("stopRecordingInternal: X");
+ return;
+}
+
+// Returns 1 iff the HAL state machine says recording is active; purely a
+// state query, does not touch the streams.
+int QCameraHardwareInterface::recordingEnabled()
+{
+ int ret = 0;
+ Mutex::Autolock lock(mLock);
+ LOGV("%s: E", __func__);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ ret = 1;
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ default:
+ break;
+ }
+ LOGV("%s: X, ret = %d", __func__, ret);
+ return ret; //isRecordingRunning();
+}
+
+/**
+* Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+* Delegates to the record stream object; safe no-op when the record
+* stream has not been initialized.
+*/
+void QCameraHardwareInterface::releaseRecordingFrame(const void *opaque)
+{
+ LOGV("%s : BEGIN",__func__);
+ if(mStreamRecord == NULL) {
+ LOGE("Record stream Not Initialized");
+ return;
+ }
+ mStreamRecord->releaseRecordingFrame(opaque);
+ LOGV("%s : END",__func__);
+ return;
+}
+
+// Handles an autofocus-done event from the lower layer. Discards stale
+// events (AF not running), refreshes focus distances, and fills in the
+// CAMERA_MSG_FOCUS app callback with success/failure based on *status.
+// The actual callback invocation is deferred to the caller via app_cb.
+status_t QCameraHardwareInterface::autoFocusEvent(cam_ctrl_status_t *status, app_notify_cb_t *app_cb)
+{
+ LOGE("autoFocusEvent: E");
+ int ret = NO_ERROR;
+/************************************************************
+ BEGIN MUTEX CODE
+*************************************************************/
+
+ LOGE("%s:%d: Trying to acquire AF bit lock",__func__,__LINE__);
+ mAutofocusLock.lock();
+ LOGE("%s:%d: Acquired AF bit lock",__func__,__LINE__);
+
+ if(mAutoFocusRunning==false) {
+ // Event arrived after cancelAutoFocus() (or duplicate) — ignore it.
+ LOGE("%s:AF not running, discarding stale event",__func__);
+ mAutofocusLock.unlock();
+ return ret;
+ }
+
+ mAutoFocusRunning = false;
+ mAutofocusLock.unlock();
+
+/************************************************************
+ END MUTEX CODE
+*************************************************************/
+ if(status==NULL) {
+ LOGE("%s:NULL ptr received for status",__func__);
+ return BAD_VALUE;
+ }
+
+ /* update focus distances after autofocus is done */
+ if(updateFocusDistances() != NO_ERROR) {
+ LOGE("%s: updateFocusDistances failed for %d", __FUNCTION__, mFocusMode);
+ }
+
+ /*(Do?) we need to make sure that the call back is the
+ last possible step in the execution flow since the same
+ context might be used if a fail triggers another round
+ of AF then the mAutoFocusRunning flag and other state
+ variables' validity will be under question*/
+
+ if (mNotifyCb && ( mMsgEnabled & CAMERA_MSG_FOCUS)){
+ LOGE("%s:Issuing callback to service",__func__);
+
+ /* "Accepted" status is not appropriate it should be used for
+ initial cmd, event reporting should only give use SUCCESS/FAIL
+ */
+
+ // Populate the deferred-notify struct; caller fires mNotifyCb later.
+ app_cb->notifyCb = mNotifyCb;
+ app_cb->argm_notify.msg_type = CAMERA_MSG_FOCUS;
+ app_cb->argm_notify.ext2 = 0;
+ app_cb->argm_notify.cookie = mCallbackCookie;
+
+ LOGE("Auto foucs state =%d", *status);
+ if(*status==CAM_CTRL_SUCCESS) {
+ app_cb->argm_notify.ext1 = true;
+ }
+ else if(*status==CAM_CTRL_FAILED){
+ app_cb->argm_notify.ext1 = false;
+ }
+ else{
+ // Unknown status: suppress the callback entirely.
+ app_cb->notifyCb = NULL;
+ LOGE("%s:Unknown AF status (%d) received",__func__,*status);
+ }
+
+ }/*(mNotifyCb && ( mMsgEnabled & CAMERA_MSG_FOCUS))*/
+ else{
+ LOGE("%s:Call back not enabled",__func__);
+ }
+
+ LOGE("autoFocusEvent: X");
+ return ret;
+
+}
+
+// Cancels an in-flight still capture; only meaningful while the state
+// machine is in TAKE_PICTURE — all other states are no-ops.
+status_t QCameraHardwareInterface::cancelPicture()
+{
+ LOGI("cancelPicture: E");
+ status_t ret = MM_CAMERA_OK;
+ Mutex::Autolock lock(mLock);
+
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ case QCAMERA_HAL_RECORDING_STARTED:
+ default:
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ ret = cancelPictureInternal();
+ break;
+ }
+ LOGI("cancelPicture: X");
+ return ret;
+}
+
+// Internal cancel: stops the snapshot stream unless the snapshot has
+// already completed (camera back in READY state). Always returns OK.
+status_t QCameraHardwareInterface::cancelPictureInternal()
+{
+ LOGI("cancelPictureInternal: E");
+ status_t ret = MM_CAMERA_OK;
+ if(mCameraState != CAMERA_STATE_READY) {
+ if(mStreamSnap) {
+ mStreamSnap->stop();
+ mCameraState = CAMERA_STATE_SNAP_STOP_CMD_SENT;
+ }
+ } else {
+ LOGE("%s: Cannot process cancel picture as snapshot is already done",__func__);
+ }
+ LOGI("cancelPictureInternal: X");
+ return ret;
+}
+
+// Thin wrapper: halts the preview stream before a (non-ZSL) snapshot.
+void QCameraHardwareInterface::pausePreviewForSnapshot()
+{
+ stopPreviewInternal( );
+}
+// Restarts the display stream after a snapshot completes.
+// NOTE(review): mStreamDisplay is dereferenced without a null check;
+// callers presumably guarantee preview was previously initialized.
+status_t QCameraHardwareInterface::resumePreviewAfterSnapshot()
+{
+ status_t ret = NO_ERROR;
+ ret = mStreamDisplay->start();
+ return ret;
+}
+
+// C-style callback invoked with a video frame to be encoded as a live
+// snapshot during recording. Copies the frame descriptor, repurposes the
+// video buffer as main+thumbnail snapshot input (video-sized), creates a
+// fresh snapshot stream instance and kicks off the liveshot JPEG encode.
+void liveshot_callback(mm_camera_ch_data_buf_t *recvd_frame,
+ void *user_data)
+{
+ QCameraHardwareInterface *pme = (QCameraHardwareInterface *)user_data;
+ cam_ctrl_dimension_t dim;
+ int mJpegMaxSize;
+ status_t ret;
+ LOGE("%s: E", __func__);
+
+
+ // Heap-copy the frame descriptor: recvd_frame is owned by the caller and
+ // may be reused once we return, but the encode is asynchronous.
+ mm_camera_ch_data_buf_t* frame =
+ (mm_camera_ch_data_buf_t *)malloc(sizeof(mm_camera_ch_data_buf_t));
+ if (frame == NULL) {
+ LOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+ cam_evt_buf_done(pme->mCameraId, recvd_frame);
+ return ;
+ }
+ memcpy(frame, recvd_frame, sizeof(mm_camera_ch_data_buf_t));
+
+ LOGE("<DEBUG> Liveshot buffer idx:%d",frame->video.video.idx);
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(pme->mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ // NOTE(review): failure is only logged — we proceed with zeroed dims.
+ LOGE("%s: error - can't get dimension!", __func__);
+ LOGE("%s: X", __func__);
+ }
+
+#if 1
+ LOGE("Live Snapshot Enabled");
+ // Use the same video buffer for both main image and thumbnail.
+ frame->snapshot.main.frame = frame->video.video.frame;
+ frame->snapshot.main.idx = frame->video.video.idx;
+ frame->snapshot.thumbnail.frame = frame->video.video.frame;
+ frame->snapshot.thumbnail.idx = frame->video.video.idx;
+
+ // Liveshot is encoded at video resolution, not picture resolution.
+ dim.picture_width = pme->mDimension.video_width;
+ dim.picture_height = pme->mDimension.video_height;
+ dim.ui_thumbnail_width = pme->mDimension.video_width;
+ dim.ui_thumbnail_height = pme->mDimension.video_height;
+ dim.main_img_format = pme->mDimension.enc_format;
+ dim.thumb_format = pme->mDimension.enc_format;
+
+ // NOTE(review): width*width looks like a typo for width*height (YUV420
+ // worst-case size = w*h*1.5) — verify against original sources.
+ mJpegMaxSize = pme->mDimension.video_width * pme->mDimension.video_width * 1.5;
+
+ LOGE("Picture w = %d , h = %d, size = %d",dim.picture_width,dim.picture_height,mJpegMaxSize);
+ if (pme->mStreamLiveSnap){
+ LOGE("%s:Deleting old Snapshot stream instance",__func__);
+ QCameraStream_Snapshot::deleteInstance (pme->mStreamLiveSnap);
+ pme->mStreamLiveSnap = NULL;
+ }
+
+ pme->mStreamLiveSnap = (QCameraStream_Snapshot*)QCameraStream_Snapshot::createInstance(pme->mCameraId,
+ pme->myMode);
+
+ if (!pme->mStreamLiveSnap) {
+ // NOTE(review): `frame` malloc'd above leaks on this path, and no
+ // cam_evt_buf_done is issued for the received frame.
+ LOGE("%s: error - can't creat snapshot stream!", __func__);
+ return ;
+ }
+ pme->mStreamLiveSnap->setModeLiveSnapshot(true);
+ pme->mStreamLiveSnap->setHALCameraControl(pme);
+ pme->mStreamLiveSnap->initSnapshotBuffers(&dim,1);
+ LOGE("Calling live shot");
+
+
+ ((QCameraStream_Snapshot*)(pme->mStreamLiveSnap))->takePictureLiveshot(frame,&dim,mJpegMaxSize);
+
+#else
+
+
+
+
+ if(MM_CAMERA_OK != cam_evt_buf_done(pme->mCameraId,frame )) {
+ LOGE(" BUF DONE FAILED");
+ }
+#endif
+ LOGE("%s: X", __func__);
+
+}
+
+// Still-capture entry point. Behavior depends on state: in preview it
+// takes a ZSL or regular snapshot (pausing preview first); during
+// recording it takes either a full-size liveshot or registers a one-shot
+// video-frame callback for a video-sized liveshot.
+status_t QCameraHardwareInterface::takePicture()
+{
+ LOGI("takePicture: E");
+ status_t ret = MM_CAMERA_OK;
+ Mutex::Autolock lock(mLock);
+
+ // NOTE(review): mStreamSnap is dereferenced here unconditionally, but is
+ // null-checked a few lines below in the ZSL branch — verify it can never
+ // be NULL at this point.
+ mStreamSnap->resetSnapshotCounters( );
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ mStreamSnap->setFullSizeLiveshot(false);
+ if (isZSLMode()) {
+ if (mStreamSnap != NULL) {
+ // ZSL: capture from the already-running ZSL stream, no preview
+ // teardown required.
+ pausePreviewForZSL();
+ ret = mStreamSnap->takePictureZSL();
+ if (ret != MM_CAMERA_OK) {
+ LOGE("%s: Error taking ZSL snapshot!", __func__);
+ ret = BAD_VALUE;
+ }
+ }
+ else {
+ LOGE("%s: ZSL stream not active! Failure!!", __func__);
+ ret = BAD_VALUE;
+ }
+ return ret;
+ }
+
+ /*prepare snapshot, e.g LED*/
+ takePicturePrepareHardware( );
+ /* There's an issue where we have a glimpse of corrupted data between
+ a time we stop a preview and display the postview. It happens because
+ when we call stopPreview we deallocate the preview buffers hence overlay
+ displays garbage value till we enqueue postview buffer to be displayed.
+ Hence for temporary fix, we'll do memcopy of the last frame displayed and
+ queue it to overlay*/
+ // mzhu storePreviewFrameForPostview();
+
+ /* stop preview */
+ pausePreviewForSnapshot();
+
+ /* call Snapshot start() :*/
+ ret = mStreamSnap->start();
+ if (MM_CAMERA_OK != ret){
+ /* mzhu: fix me, restore preview */
+ LOGE("%s: error - can't start Snapshot stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ if(MM_CAMERA_OK == ret)
+ mCameraState = CAMERA_STATE_SNAP_START_CMD_SENT;
+ else
+ mCameraState = CAMERA_STATE_ERROR; // NOTE(review): unreachable (early return above)
+ mPreviewState = QCAMERA_HAL_TAKE_PICTURE;
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ // Capture already in progress: ignore duplicate request.
+ break;
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ ret = UNKNOWN_ERROR;
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ if (canTakeFullSizeLiveshot()) {
+ LOGD(" Calling takeFullSizeLiveshot");
+ takeFullSizeLiveshot();
+ }else{
+ // Video-sized liveshot: grab the next video frame via callback.
+ (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+ liveshot_callback,
+ MM_CAMERA_REG_BUF_CB_COUNT,
+ 1,
+ this);
+ }
+
+ break;
+ default:
+ ret = UNKNOWN_ERROR;
+ break;
+ }
+ LOGI("takePicture: X");
+ return ret;
+}
+
+// Decides whether a liveshot during recording can use full picture
+// resolution, or must fall back to video resolution. Falls back when the
+// feature is disabled, in low-power camcorder mode, when picture size
+// equals video size, or when DIS margins (10%) would exceed picture size.
+bool QCameraHardwareInterface::canTakeFullSizeLiveshot() {
+ bool ret;
+ if (mFullLiveshotEnabled && !isLowPowerCamcorder()) {
+ /* Full size liveshot enabled. */
+
+ /* If Picture size is same as video size, switch to Video size
+ * live snapshot */
+ if ((mDimension.picture_width == mDimension.video_width) &&
+ (mDimension.picture_height == mDimension.video_height)) {
+ return FALSE;
+ }
+
+ if (mDisEnabled) {
+ /* If DIS is enabled and Picture size is
+ * less than (video size + 10% DIS Margin)
+ * then fall back to Video size liveshot. */
+ if ((mDimension.picture_width <
+ (int)(mDimension.video_width * 1.1)) ||
+ (mDimension.picture_height <
+ (int)(mDimension.video_height * 1.1))) {
+ ret = FALSE;
+ } else {
+ /* Go with Full size live snapshot. */
+ ret = TRUE;
+ }
+ } else {
+ /* DIS Disabled. Go with Full size live snapshot */
+ ret = TRUE;
+ }
+ } else {
+ /* Full size liveshot disabled. Fallback to Video size liveshot. */
+ ret = FALSE;
+ }
+
+ return ret;
+}
+
+// Creates (replacing any previous instance) a snapshot stream configured
+// for full-size liveshot, initializes and starts it. Returns BAD_VALUE on
+// any creation/init/start failure.
+status_t QCameraHardwareInterface::takeFullSizeLiveshot()
+{
+ status_t ret = NO_ERROR;
+ if (mStreamLiveSnap){
+ LOGE("%s:Deleting old Snapshot stream instance",__func__);
+ QCameraStream_Snapshot::deleteInstance (mStreamLiveSnap);
+ mStreamLiveSnap = NULL;
+ }
+ mStreamLiveSnap = QCameraStream_Snapshot::createInstance(mCameraId, myMode);
+
+ if (!mStreamLiveSnap) {
+ LOGE("%s: error - can't creat snapshot stream!", __func__);
+ /* mzhu: fix me, restore preview */
+ return BAD_VALUE;
+ }
+
+ /* Store HAL object in snapshot stream Object */
+ mStreamLiveSnap->setHALCameraControl(this);
+
+ mStreamLiveSnap->setFullSizeLiveshot(true);
+
+ /* Call snapshot init*/
+ ret = mStreamLiveSnap->init();
+ if (MM_CAMERA_OK != ret){
+ LOGE("%s: error - can't init Snapshot stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ /* call Snapshot start() :*/
+ mStreamLiveSnap->resetSnapshotCounters( );
+ ret = mStreamLiveSnap->start();
+ if (MM_CAMERA_OK != ret){
+ /* mzhu: fix me, restore preview */
+ LOGE("%s: error - can't start Snapshot stream!", __func__);
+ return BAD_VALUE;
+ }
+ return ret;
+}
+
+// Delegates a (video-sized) live snapshot to the record stream.
+// NOTE(review): mStreamRecord is not null-checked and the delegate's
+// result is discarded — always returns NO_ERROR.
+status_t QCameraHardwareInterface::takeLiveSnapshot()
+{
+ status_t ret = NO_ERROR;
+ LOGI("takeLiveSnapshot: E");
+ mStreamRecord->takeLiveSnapshot();
+ LOGI("takeLiveSnapshot: X");
+ return ret;
+}
+
+// Kicks off autofocus via the mm-camera ops layer. Idempotent while AF is
+// already running. Completion is reported asynchronously through
+// autoFocusEvent(), which issues the CAMERA_MSG_FOCUS callback.
+status_t QCameraHardwareInterface::autoFocus()
+{
+ LOGI("autoFocus: E");
+ status_t ret = NO_ERROR;
+ Mutex::Autolock lock(mLock);
+ LOGI("autoFocus: Got lock");
+ bool status = true; // NOTE(review): unused
+ isp3a_af_mode_t afMode = getAutoFocusMode(mParameters);
+
+ if(mAutoFocusRunning==true){
+ LOGE("%s:AF already running should not have got this call",__func__);
+ return NO_ERROR;
+ }
+
+ if (afMode == AF_MODE_MAX) {
+ /* This should never happen. We cannot send a
+ * callback notifying error from this place because
+ * the CameraService has called this function after
+ * acquiring the lock. So if we try to issue a callback
+ * from this place, the callback will try to acquire
+ * the same lock in CameraService and it will result
+ * in deadlock. So, let the call go in to the lower
+ * layer. The lower layer will anyway return error if
+ * the autofocus is not supported or if the focus
+ * value is invalid.
+ * Just print out the error. */
+ LOGE("%s:Invalid AF mode (%d)", __func__, afMode);
+ }
+
+ LOGI("%s:AF start (mode %d)", __func__, afMode);
+ if(MM_CAMERA_OK != cam_ops_action(mCameraId, TRUE,
+ MM_CAMERA_OPS_FOCUS, &afMode)) {
+ LOGE("%s: AF command failed err:%d error %s",
+ __func__, errno, strerror(errno));
+ return UNKNOWN_ERROR;
+ }
+
+ // Flag set only after the command was accepted by the lower layer.
+ mAutoFocusRunning = true;
+ LOGI("autoFocus: X");
+ return ret;
+}
+
+// Cancels an in-flight autofocus. Clears mAutoFocusRunning under the AF
+// lock (so a late AF-done event is treated as stale), then tells the
+// lower layer to stop. Always reports NO_ERROR to the caller.
+status_t QCameraHardwareInterface::cancelAutoFocus()
+{
+ LOGE("cancelAutoFocus: E");
+ status_t ret = NO_ERROR;
+ Mutex::Autolock lock(mLock);
+
+/**************************************************************
+ BEGIN MUTEX CODE
+*************************************************************/
+
+ mAutofocusLock.lock();
+ if(mAutoFocusRunning) {
+
+ mAutoFocusRunning = false;
+ mAutofocusLock.unlock();
+
+ }else/*(!mAutoFocusRunning)*/{
+
+ // Nothing to cancel.
+ mAutofocusLock.unlock();
+ LOGE("%s:Af not running",__func__);
+ return NO_ERROR;
+ }
+/**************************************************************
+ END MUTEX CODE
+*************************************************************/
+
+
+ // Best-effort: a failed cancel command is only logged.
+ if(MM_CAMERA_OK!=cam_ops_action(mCameraId,FALSE,MM_CAMERA_OPS_FOCUS,NULL )) {
+ LOGE("%s: AF command failed err:%d error %s",__func__, errno,strerror(errno));
+ }
+
+ LOGE("cancelAutoFocus: X");
+ return NO_ERROR;
+}
+
+#if 0 //mzhu
+/*==========================================================================
+ * FUNCTION - prepareSnapshotAndWait -
+ *
+ * DESCRIPTION: invoke preparesnapshot and wait for it done
+ it can be called within takepicture, so no need
+ to grab mLock.
+ *=========================================================================*/
+// Dead code (compiled out): would block on mAsyncCmdWait until
+// processprepareSnapshotEvent() signals completion.
+void QCameraHardwareInterface::prepareSnapshotAndWait()
+{
+ LOGI("prepareSnapshotAndWait: E");
+ int rc = 0;
+ /*To Do: call mm camera preparesnapshot */
+ if(!rc ) {
+ mPreparingSnapshot = true;
+ pthread_mutex_lock(&mAsyncCmdMutex);
+ pthread_cond_wait(&mAsyncCmdWait, &mAsyncCmdMutex);
+ pthread_mutex_unlock(&mAsyncCmdMutex);
+ mPreparingSnapshot = false;
+ }
+ LOGI("prepareSnapshotAndWait: X");
+}
+#endif //mzhu
+
+/*==========================================================================
+ * FUNCTION - processprepareSnapshotEvent -
+ *
+ * DESCRIPTION: Process the event of preparesnapshot done msg
+ unblock prepareSnapshotAndWait( )
+ *=========================================================================*/
+// Signals the async-command condition variable; the status argument is
+// currently unused.
+void QCameraHardwareInterface::processprepareSnapshotEvent(cam_ctrl_status_t *status)
+{
+ LOGI("processprepareSnapshotEvent: E");
+ pthread_mutex_lock(&mAsyncCmdMutex);
+ pthread_cond_signal(&mAsyncCmdWait);
+ pthread_mutex_unlock(&mAsyncCmdMutex);
+ LOGI("processprepareSnapshotEvent: X");
+}
+
+// Face-detection ROI event handler: currently just forwards the ROI to the
+// preview stream. The large #if 0 region is the legacy in-HAL metadata
+// callback path, kept (disabled) pending a move into the preview object.
+void QCameraHardwareInterface::roiEvent(fd_roi_t roi,app_notify_cb_t *app_cb)
+{
+ LOGE("roiEvent: E");
+
+ if(mStreamDisplay) mStreamDisplay->notifyROIEvent(roi);
+#if 0 //TODO: move to preview obj
+ mCallbackLock.lock();
+ data_callback mcb = mDataCb;
+ void *mdata = mCallbackCookie;
+ int msgEnabled = mMsgEnabled;
+ mCallbackLock.unlock();
+
+ mMetaDataWaitLock.lock();
+ if (mFaceDetectOn == true && mSendMetaData == true) {
+ mSendMetaData = false;
+ int faces_detected = roi.rect_num;
+ int max_faces_detected = MAX_ROI * 4;
+ int array[max_faces_detected + 1];
+
+ array[0] = faces_detected * 4;
+ for (int i = 1, j = 0;j < MAX_ROI; j++, i = i + 4) {
+ if (j < faces_detected) {
+ array[i] = roi.faces[j].x;
+ array[i+1] = roi.faces[j].y;
+ array[i+2] = roi.faces[j].dx;
+ array[i+3] = roi.faces[j].dy;
+ } else {
+ array[i] = -1;
+ array[i+1] = -1;
+ array[i+2] = -1;
+ array[i+3] = -1;
+ }
+ }
+ if(mMetaDataHeap != NULL){
+ LOGV("mMetaDataHEap is non-NULL");
+ memcpy((uint32_t *)mMetaDataHeap->mHeap->base(), (uint32_t *)array, (sizeof(int)*(MAX_ROI*4+1)));
+ mMetaDataWaitLock.unlock();
+
+ if (mcb != NULL && (msgEnabled & CAMERA_MSG_META_DATA)) {
+ mcb(CAMERA_MSG_META_DATA, mMetaDataHeap->mBuffers[0], mdata);
+ }
+ } else {
+ mMetaDataWaitLock.unlock();
+ LOGE("runPreviewThread mMetaDataHeap is NULL");
+ }
+ } else {
+ mMetaDataWaitLock.unlock();
+ }
+#endif // mzhu
+ LOGE("roiEvent: X");
+}
+
+
+// On a zoom change, fetches the current snapshot-channel crop rectangles
+// from the config layer and caches them in the snapshot (and, when full
+// liveshot is active, the live-snapshot) stream objects.
+void QCameraHardwareInterface::handleZoomEventForSnapshot(void)
+{
+ mm_camera_ch_crop_t v4l2_crop;
+
+
+ LOGI("%s: E", __func__);
+
+ memset(&v4l2_crop,0,sizeof(v4l2_crop));
+ v4l2_crop.ch_type=MM_CAMERA_CH_SNAPSHOT;
+
+ LOGI("%s: Fetching crop info", __func__);
+ cam_config_get_parm(mCameraId,MM_CAMERA_PARM_CROP,&v4l2_crop);
+
+ LOGI("%s: Crop info received for main: %d, %d, %d, %d ", __func__,
+ v4l2_crop.snapshot.main_crop.left,
+ v4l2_crop.snapshot.main_crop.top,
+ v4l2_crop.snapshot.main_crop.width,
+ v4l2_crop.snapshot.main_crop.height);
+ LOGI("%s: Crop info received for thumbnail: %d, %d, %d, %d ",__func__,
+ v4l2_crop.snapshot.thumbnail_crop.left,
+ v4l2_crop.snapshot.thumbnail_crop.top,
+ v4l2_crop.snapshot.thumbnail_crop.width,
+ v4l2_crop.snapshot.thumbnail_crop.height);
+
+ if(mStreamSnap) {
+ LOGD("%s: Setting crop info for snapshot", __func__);
+ memcpy(&(mStreamSnap->mCrop), &v4l2_crop, sizeof(v4l2_crop));
+ }
+ if(mFullLiveshotEnabled && mStreamLiveSnap){
+ LOGD("%s: Setting crop info for snapshot", __func__);
+ memcpy(&(mStreamLiveSnap->mCrop), &v4l2_crop, sizeof(v4l2_crop));
+ }
+ LOGD("%s: X", __func__);
+}
+
+// On a zoom change during preview, fetches the preview-channel crop and
+// applies it to the native preview window. Skipped while a smooth-zoom
+// sequence is running or when no window is set.
+void QCameraHardwareInterface::handleZoomEventForPreview(app_notify_cb_t *app_cb)
+{
+ mm_camera_ch_crop_t v4l2_crop;
+
+ LOGI("%s: E", __func__);
+
+ /*regular zooming or smooth zoom stopped*/
+ if (!mSmoothZoomRunning && mPreviewWindow) {
+ memset(&v4l2_crop, 0, sizeof(v4l2_crop));
+ v4l2_crop.ch_type = MM_CAMERA_CH_PREVIEW;
+
+ LOGI("%s: Fetching crop info", __func__);
+ cam_config_get_parm(mCameraId,MM_CAMERA_PARM_CROP,&v4l2_crop);
+
+ LOGI("%s: Crop info received: %d, %d, %d, %d ", __func__,
+ v4l2_crop.crop.left,
+ v4l2_crop.crop.top,
+ v4l2_crop.crop.width,
+ v4l2_crop.crop.height);
+
+ // set_crop takes (left, top, right, bottom), hence left+width/top+height.
+ mPreviewWindow->set_crop(mPreviewWindow,
+ v4l2_crop.crop.left,
+ v4l2_crop.crop.top,
+ v4l2_crop.crop.left + v4l2_crop.crop.width,
+ v4l2_crop.crop.top + v4l2_crop.crop.height);
+ LOGI("%s: Done setting crop", __func__);
+ LOGI("%s: Currrent zoom :%d",__func__, mCurrentZoom);
+ }
+
+ LOGI("%s: X", __func__);
+}
+
+// Zoom-changed event dispatcher: routes crop updates to preview and/or
+// snapshot streams depending on the current HAL state (ZSL and full
+// liveshot also need the snapshot crop while previewing/recording).
+void QCameraHardwareInterface::zoomEvent(cam_ctrl_status_t *status, app_notify_cb_t *app_cb)
+{
+ LOGI("zoomEvent: state:%d E",mPreviewState);
+ switch (mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ handleZoomEventForPreview(app_cb);
+ if (isZSLMode())
+ handleZoomEventForSnapshot();
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ handleZoomEventForPreview(app_cb);
+ if (mFullLiveshotEnabled)
+ handleZoomEventForSnapshot();
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ if(isZSLMode())
+ handleZoomEventForPreview(app_cb);
+ handleZoomEventForSnapshot();
+ break;
+ default:
+ break;
+ }
+ LOGI("zoomEvent: X");
+}
+
+// Debug helper: dumps a raw buffer to /data/<name>_<index>.<ext>.
+// NOTE(review): the open() fd is not checked before write/close, and the
+// local `str` is unused.
+void QCameraHardwareInterface::dumpFrameToFile(const void * data, uint32_t size, char* name, char* ext, int index)
+{
+ char buf[32];
+ int file_fd;
+ if ( data != NULL) {
+ char * str;
+ snprintf(buf, sizeof(buf), "/data/%s_%d.%s", name, index, ext);
+ LOGE("marvin, %s size =%d", buf, size);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ write(file_fd, data, size);
+ close(file_fd);
+ }
+}
+
+// Debug helper gated by the persist.camera.dumpimg property. The property
+// value is a bitfield: low byte selects frame types (HAL_DUMP_FRM_*),
+// bits 8-15 a skip interval, bits 16-31 the number of frames to dump.
+// Writes Y then CbCr planes of the frame as /data/<n><type>_<w>x<h>.yuv.
+void QCameraHardwareInterface::dumpFrameToFile(struct msm_frame* newFrame,
+ HAL_cam_dump_frm_type_t frm_type)
+{
+ int32_t enabled = 0;
+ int frm_num;
+ uint32_t skip_mode;
+ char value[PROPERTY_VALUE_MAX];
+ char buf[32];
+ int main_422 = 1;
+ property_get("persist.camera.dumpimg", value, "0");
+ enabled = atoi(value);
+
+ LOGV(" newFrame =%p, frm_type = %d", newFrame, frm_type);
+ if(enabled & HAL_DUMP_FRM_MASK_ALL) {
+ if((enabled & frm_type) && newFrame) {
+ frm_num = ((enabled & 0xffff0000) >> 16);
+ if(frm_num == 0) frm_num = 10; /*default 10 frames*/
+ if(frm_num > 256) frm_num = 256; /*256 buffers cycle around*/
+ skip_mode = ((enabled & 0x0000ff00) >> 8);
+ if(skip_mode == 0) skip_mode = 1; /*no -skip */
+
+ if( mDumpSkipCnt % skip_mode == 0) {
+ if (mDumpFrmCnt >= 0 && mDumpFrmCnt <= frm_num) {
+ int w, h;
+ int file_fd;
+ // Pick dimensions and filename prefix per frame type:
+ // p=preview, v=video, m=main image, t=thumbnail.
+ switch (frm_type) {
+ case HAL_DUMP_FRM_PREVIEW:
+ w = mDimension.display_width;
+ h = mDimension.display_height;
+ snprintf(buf, sizeof(buf), "/data/%dp_%dx%d.yuv", mDumpFrmCnt, w, h);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ break;
+ case HAL_DUMP_FRM_VIDEO:
+ w = mDimension.video_width;
+ h = mDimension.video_height;
+ snprintf(buf, sizeof(buf),"/data/%dv_%dx%d.yuv", mDumpFrmCnt, w, h);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ break;
+ case HAL_DUMP_FRM_MAIN:
+ w = mDimension.picture_width;
+ h = mDimension.picture_height;
+ snprintf(buf, sizeof(buf), "/data/%dm_%dx%d.yuv", mDumpFrmCnt, w, h);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ // 422 formats carry a full-size chroma plane (factor 2 below).
+ if (mDimension.main_img_format == CAMERA_YUV_422_NV16 ||
+ mDimension.main_img_format == CAMERA_YUV_422_NV61)
+ main_422 = 2;
+ break;
+ case HAL_DUMP_FRM_THUMBNAIL:
+ w = mDimension.ui_thumbnail_width;
+ h = mDimension.ui_thumbnail_height;
+ snprintf(buf, sizeof(buf),"/data/%dt_%dx%d.yuv", mDumpFrmCnt, w, h);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ break;
+ default:
+ w = h = 0;
+ file_fd = -1;
+ break;
+ }
+
+ if (file_fd < 0) {
+ LOGE("%s: cannot open file:type=%d\n", __func__, frm_type);
+ } else {
+ LOGE("%s: %d %d", __func__, newFrame->y_off, newFrame->cbcr_off);
+ write(file_fd, (const void *)(newFrame->buffer+newFrame->y_off), w * h);
+ write(file_fd, (const void *)
+ (newFrame->buffer + newFrame->cbcr_off), w * h / 2 * main_422);
+ close(file_fd);
+ LOGE("dump %s", buf);
+ }
+ } else if(frm_num == 256){
+ // Cycle the counter so dumping continues indefinitely.
+ mDumpFrmCnt = 0;
+ }
+ mDumpFrmCnt++;
+ }
+ mDumpSkipCnt++;
+ }
+ } else {
+ mDumpFrmCnt = 0;
+ }
+}
+
+// Accepts the native preview window from the framework. If preview start
+// was pending on a window (PREVIEW_START), this is what actually kicks off
+// startPreview2(); otherwise the window is just handed to the display
+// stream (or stashed for later).
+status_t QCameraHardwareInterface::setPreviewWindow(preview_stream_ops_t* window)
+{
+ status_t retVal = NO_ERROR;
+ LOGE(" %s: E mPreviewState = %d, mStreamDisplay = %p", __FUNCTION__, mPreviewState, mStreamDisplay);
+ if( window == NULL) {
+ // NULL window is legal (surface torn down); logged and handled per-state.
+ LOGE("%s:Received Setting NULL preview window", __func__);
+ }
+ Mutex::Autolock lock(mLock);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_START:
+ mPreviewWindow = window;
+ if(mPreviewWindow) {
+ /* we have valid surface now, start preview */
+ LOGE("%s: calling startPreview2", __func__);
+ retVal = startPreview2();
+ if(retVal == NO_ERROR)
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ LOGE("%s: startPreview2 done, mPreviewState = %d", __func__, mPreviewState);
+ } else
+ LOGE("%s: null window received, mPreviewState = %d", __func__, mPreviewState);
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ /* new window comes */
+ LOGE("%s: bug, cannot handle new window in started state", __func__);
+ //retVal = UNKNOWN_ERROR;
+ break;
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_TAKE_PICTURE:
+ mPreviewWindow = window;
+ LOGE("%s: mPreviewWindow = 0x%p, mStreamDisplay = 0x%p",
+ __func__, mPreviewWindow, mStreamDisplay);
+ if(mStreamDisplay)
+ retVal = mStreamDisplay->setPreviewWindow(window);
+ break;
+ default:
+ LOGE("%s: bug, cannot handle new window in state %d", __func__, mPreviewState);
+ retVal = UNKNOWN_ERROR;
+ break;
+ }
+ LOGE(" %s : X, mPreviewState = %d", __FUNCTION__, mPreviewState);
+ return retVal;
+}
+
+// Records the framework's metadata-in-buffers preference; currently a
+// stub that only stores the flag (see inline comment).
+int QCameraHardwareInterface::storeMetaDataInBuffers(int enable)
+{
+ /* this is a dummy func now. fix me later */
+ mStoreMetaDataInFrame = enable;
+ return 0;
+}
+
+// Sends a frame-buffer mapping message (including the buffer fd) to the
+// camera daemon over the mm-camera socket. Returns FAILED_TRANSACTION on
+// send failure.
+status_t QCameraHardwareInterface::sendMappingBuf(int ext_mode, int idx, int fd,
+ uint32_t size, int cameraid,
+ mm_camera_socket_msg_type msg_type)
+{
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = msg_type;
+ packet.payload.frame_fd_map.ext_mode = ext_mode;
+ packet.payload.frame_fd_map.frame_idx = idx;
+ packet.payload.frame_fd_map.fd = fd;
+ packet.payload.frame_fd_map.size = size;
+
+ // The fd is passed as ancillary data so the daemon can map the buffer.
+ if ( cam_ops_sendmsg(cameraid, &packet, sizeof(cam_sock_packet_t), packet.payload.frame_fd_map.fd) <= 0 ) {
+ LOGE("%s: sending frame mapping buf msg Failed", __func__);
+ return FAILED_TRANSACTION;
+ }
+ return NO_ERROR;
+}
+
+// Counterpart of sendMappingBuf: tells the daemon to unmap a previously
+// mapped frame buffer (no fd is attached to this message).
+status_t QCameraHardwareInterface::sendUnMappingBuf(int ext_mode, int idx, int cameraid,
+ mm_camera_socket_msg_type msg_type)
+{
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = msg_type;
+ packet.payload.frame_fd_unmap.ext_mode = ext_mode;
+ packet.payload.frame_fd_unmap.frame_idx = idx;
+ if ( cam_ops_sendmsg(cameraid, &packet, sizeof(cam_sock_packet_t), 0) <= 0 ) {
+ LOGE("%s: sending frame unmapping buf msg Failed", __func__);
+ return FAILED_TRANSACTION;
+ }
+ return NO_ERROR;
+}
+
+// Allocates one ION buffer for slot `cnt` of a QCameraHalHeap_t:
+// opens /dev/ion, allocates a cached, page-aligned buffer of heap->size
+// from the heaps in `ion_type` (a heap mask), and shares it to get an fd.
+// On success fills main_ion_fd/alloc/ion_info_fd/fd for the slot and
+// returns 0; on failure unwinds via the goto ladder and returns -1.
+int QCameraHardwareInterface::allocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt, int ion_type)
+{
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ p_camera_memory->main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+ if (p_camera_memory->main_ion_fd[cnt] < 0) {
+ LOGE("Ion dev open failed\n");
+ LOGE("Error is %s\n", strerror(errno));
+ goto ION_OPEN_FAILED;
+ }
+ p_camera_memory->alloc[cnt].len = p_camera_memory->size;
+ /* to make it page size aligned */
+ p_camera_memory->alloc[cnt].len = (p_camera_memory->alloc[cnt].len + 4095) & (~4095);
+ p_camera_memory->alloc[cnt].align = 4096;
+ p_camera_memory->alloc[cnt].flags = ION_FLAG_CACHED;
+ p_camera_memory->alloc[cnt].heap_mask = ion_type;
+
+
+ rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_ALLOC, &p_camera_memory->alloc[cnt]);
+ if (rc < 0) {
+ LOGE("ION allocation failed\n");
+ goto ION_ALLOC_FAILED;
+ }
+
+ // Convert the ION handle into a shareable fd for mmap/daemon mapping.
+ p_camera_memory->ion_info_fd[cnt].handle = p_camera_memory->alloc[cnt].handle;
+ rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_SHARE, &p_camera_memory->ion_info_fd[cnt]);
+ if (rc < 0) {
+ LOGE("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ p_camera_memory->fd[cnt] = p_camera_memory->ion_info_fd[cnt].fd;
+ return 0;
+
+// Error unwind: free what was acquired, deepest failure first.
+ION_MAP_FAILED:
+ handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+ ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ close(p_camera_memory->main_ion_fd[cnt]);
+ p_camera_memory->main_ion_fd[cnt] = -1;
+ION_OPEN_FAILED:
+ return -1;
+}
+
+// Frees the ION buffer for slot `cnt` of a QCameraHalHeap_t and closes the
+// ION device fd; safe to call on an already-released slot (fd <= 0).
+int QCameraHardwareInterface::deallocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt)
+{
+ struct ion_handle_data handle_data;
+ int rc = 0;
+
+ if (p_camera_memory->main_ion_fd[cnt] > 0) {
+ handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+ ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ close(p_camera_memory->main_ion_fd[cnt]);
+ p_camera_memory->main_ion_fd[cnt] = -1;
+ }
+ return rc;
+}
+
+// QCameraStatHeap_t variant of allocate_ion_memory. Identical flow to the
+// HalHeap overload, except `ion_type` here is a heap *ID*: the mask is
+// built as (1<<ion_type | 1<<ION_IOMMU_HEAP_ID) with IOMMU as fallback.
+int QCameraHardwareInterface::allocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt, int ion_type)
+{
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ p_camera_memory->main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+ if (p_camera_memory->main_ion_fd[cnt] < 0) {
+ LOGE("Ion dev open failed\n");
+ LOGE("Error is %s\n", strerror(errno));
+ goto ION_OPEN_FAILED;
+ }
+ p_camera_memory->alloc[cnt].len = p_camera_memory->size;
+ /* to make it page size aligned */
+ p_camera_memory->alloc[cnt].len = (p_camera_memory->alloc[cnt].len + 4095) & (~4095);
+ p_camera_memory->alloc[cnt].align = 4096;
+ p_camera_memory->alloc[cnt].flags = ION_FLAG_CACHED;
+ p_camera_memory->alloc[cnt].heap_mask = (0x1 << ion_type | 0x1 << ION_IOMMU_HEAP_ID);
+
+ rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_ALLOC, &p_camera_memory->alloc[cnt]);
+ if (rc < 0) {
+ LOGE("ION allocation failed\n");
+ goto ION_ALLOC_FAILED;
+ }
+
+ p_camera_memory->ion_info_fd[cnt].handle = p_camera_memory->alloc[cnt].handle;
+ rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_SHARE, &p_camera_memory->ion_info_fd[cnt]);
+ if (rc < 0) {
+ LOGE("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ p_camera_memory->fd[cnt] = p_camera_memory->ion_info_fd[cnt].fd;
+ return 0;
+
+// Error unwind: free what was acquired, deepest failure first.
+ION_MAP_FAILED:
+ handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+ ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ close(p_camera_memory->main_ion_fd[cnt]);
+ p_camera_memory->main_ion_fd[cnt] = -1;
+ION_OPEN_FAILED:
+ return -1;
+}
+
+// Issues an ION custom cache-maintenance ioctl (clean/invalidate per
+// `type`) for the region described by cache_data.
+// NOTE(review): the success LOGV format has a %d but no matching argument
+// (`type` is missing from the vararg list) — undefined output if compiled.
+int QCameraHardwareInterface::cache_ops(int ion_fd,
+ struct ion_flush_data *cache_data, int type)
+{
+ int rc = 0;
+ struct ion_custom_data data;
+ data.cmd = type;
+ data.arg = (unsigned long)cache_data;
+ rc = ioctl(ion_fd, ION_IOC_CUSTOM, &data);
+ if (rc < 0)
+ LOGE("%s: Cache Invalidate failed\n", __func__);
+ else
+ LOGV("%s: Cache OPs type(%d) success", __func__);
+
+ return rc;
+}
+
+int QCameraHardwareInterface::deallocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt)
+{
+ struct ion_handle_data handle_data;
+ int rc = 0;
+
+ if (p_camera_memory->main_ion_fd[cnt] > 0) {
+ handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+ ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ close(p_camera_memory->main_ion_fd[cnt]);
+ p_camera_memory->main_ion_fd[cnt] = -1;
+ }
+ return rc;
+}
+
+int QCameraHardwareInterface::initHeapMem( QCameraHalHeap_t *heap,
+ int num_of_buf,
+ int buf_len,
+ int y_off,
+ int cbcr_off,
+ int pmem_type,
+ mm_cameara_stream_buf_t *StreamBuf,
+ mm_camera_buf_def_t *buf_def,
+ uint8_t num_planes,
+ uint32_t *planes
+)
+{
+ int rc = 0;
+ int i;
+ int path;
+ struct msm_frame *frame;
+ LOGE("Init Heap =%p. stream_buf =%p, pmem_type =%d, num_of_buf=%d. buf_len=%d, cbcr_off=%d",
+ heap, StreamBuf, pmem_type, num_of_buf, buf_len, cbcr_off);
+ if(num_of_buf > MM_CAMERA_MAX_NUM_FRAMES || heap == NULL ||
+ mGetMemory == NULL ) {
+ LOGE("Init Heap error");
+ rc = -1;
+ return rc;
+ }
+ memset(heap, 0, sizeof(QCameraHalHeap_t));
+ for (i=0; i<MM_CAMERA_MAX_NUM_FRAMES;i++) {
+ heap->main_ion_fd[i] = -1;
+ heap->fd[i] = -1;
+ }
+ heap->buffer_count = num_of_buf;
+ heap->size = buf_len;
+ heap->y_offset = y_off;
+ heap->cbcr_offset = cbcr_off;
+
+ if (StreamBuf != NULL) {
+ StreamBuf->num = num_of_buf;
+ StreamBuf->frame_len = buf_len;
+ switch (pmem_type) {
+ case MSM_PMEM_MAINIMG:
+ case MSM_PMEM_RAW_MAINIMG:
+ path = OUTPUT_TYPE_S;
+ break;
+
+ case MSM_PMEM_THUMBNAIL:
+ path = OUTPUT_TYPE_T;
+ break;
+
+ default:
+ rc = -1;
+ return rc;
+ }
+ }
+
+
+ for(i = 0; i < num_of_buf; i++) {
+#ifdef USE_ION
+ if (isZSLMode())
+ rc = allocate_ion_memory(heap, i, ((0x1 << CAMERA_ZSL_ION_HEAP_ID) |
+ (0x1 << CAMERA_ZSL_ION_FALLBACK_HEAP_ID)));
+ else
+ rc = allocate_ion_memory(heap, i, ((0x1 << CAMERA_ION_HEAP_ID) |
+ (0x1 << CAMERA_ION_FALLBACK_HEAP_ID)));
+
+ if (rc < 0) {
+ LOGE("%sION allocation failed\n", __func__);
+ break;
+ }
+#else
+ if (pmem_type == MSM_PMEM_MAX)
+ heap->fd[i] = -1;
+ else {
+ heap->fd[i] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+ if ( heap->fd[i] <= 0) {
+ rc = -1;
+ LOGE("Open fail: heap->fd[%d] =%d", i, heap->fd[i]);
+ break;
+ }
+ }
+#endif
+ heap->camera_memory[i] = mGetMemory( heap->fd[i], buf_len, 1, (void *)this);
+
+ if (heap->camera_memory[i] == NULL ) {
+ LOGE("Getmem fail %d: ", i);
+ rc = -1;
+ break;
+ }
+ if (StreamBuf != NULL) {
+ frame = &(StreamBuf->frame[i]);
+ memset(frame, 0, sizeof(struct msm_frame));
+ frame->fd = heap->fd[i];
+ frame->phy_offset = 0;
+ frame->buffer = (uint32_t) heap->camera_memory[i]->data;
+ frame->path = path;
+ frame->cbcr_off = planes[0]+heap->cbcr_offset;
+ frame->y_off = heap->y_offset;
+ frame->fd_data = heap->ion_info_fd[i];
+ frame->ion_alloc = heap->alloc[i];
+ frame->ion_dev_fd = heap->main_ion_fd[i];
+ LOGD("%s: Buffer idx: %d addr: %x fd: %d phy_offset: %d"
+ "cbcr_off: %d y_off: %d frame_len: %d", __func__,
+ i, (unsigned int)frame->buffer, frame->fd,
+ frame->phy_offset, cbcr_off, y_off, frame->ion_alloc.len);
+
+ buf_def->buf.mp[i].frame = *frame;
+ buf_def->buf.mp[i].frame_offset = 0;
+ buf_def->buf.mp[i].num_planes = num_planes;
+ /* Plane 0 needs to be set separately. Set other planes
+ * in a loop. */
+ buf_def->buf.mp[i].planes[0].length = planes[0];
+ buf_def->buf.mp[i].planes[0].m.userptr = frame->fd;
+ buf_def->buf.mp[i].planes[0].data_offset = y_off;
+ buf_def->buf.mp[i].planes[0].reserved[0] =
+ buf_def->buf.mp[i].frame_offset;
+ for (int j = 1; j < num_planes; j++) {
+ buf_def->buf.mp[i].planes[j].length = planes[j];
+ buf_def->buf.mp[i].planes[j].m.userptr = frame->fd;
+ buf_def->buf.mp[i].planes[j].data_offset = cbcr_off;
+ buf_def->buf.mp[i].planes[j].reserved[0] =
+ buf_def->buf.mp[i].planes[j-1].reserved[0] +
+ buf_def->buf.mp[i].planes[j-1].length;
+ }
+ } else {
+ }
+
+ LOGE("heap->fd[%d] =%d, camera_memory=%p", i, heap->fd[i], heap->camera_memory[i]);
+ heap->local_flag[i] = 1;
+ }
+ if( rc < 0) {
+ releaseHeapMem(heap);
+ }
+ return rc;
+
+}
+
+int QCameraHardwareInterface::releaseHeapMem( QCameraHalHeap_t *heap)
+{
+ int rc = 0;
+ LOGE("Release %p", heap);
+ if (heap != NULL) {
+
+ for (int i = 0; i < heap->buffer_count; i++) {
+ if(heap->camera_memory[i] != NULL) {
+ heap->camera_memory[i]->release( heap->camera_memory[i] );
+ heap->camera_memory[i] = NULL;
+ } else if (heap->fd[i] <= 0) {
+ LOGE("impossible: amera_memory[%d] = %p, fd = %d",
+ i, heap->camera_memory[i], heap->fd[i]);
+ }
+
+ if(heap->fd[i] > 0) {
+ close(heap->fd[i]);
+ heap->fd[i] = -1;
+ }
+#ifdef USE_ION
+ deallocate_ion_memory(heap, i);
+#endif
+ }
+ heap->buffer_count = 0;
+ heap->size = 0;
+ heap->y_offset = 0;
+ heap->cbcr_offset = 0;
+ }
+ return rc;
+}
+
+preview_format_info_t QCameraHardwareInterface::getPreviewFormatInfo( )
+{
+ return mPreviewFormatInfo;
+}
+
+void QCameraHardwareInterface::wdenoiseEvent(cam_ctrl_status_t status, void *cookie)
+{
+ LOGI("wdnEvent: preview state:%d E",mPreviewState);
+ if (mStreamSnap != NULL) {
+ LOGI("notifyWDNEvent to snapshot stream");
+ mStreamSnap->notifyWDenoiseEvent(status, cookie);
+ }
+}
+
+bool QCameraHardwareInterface::isWDenoiseEnabled()
+{
+ return mDenoiseValue;
+}
+
+void QCameraHardwareInterface::takePicturePrepareHardware()
+{
+ LOGV("%s: E", __func__);
+
+ /* Prepare snapshot*/
+ cam_ops_action(mCameraId,
+ TRUE,
+ MM_CAMERA_OPS_PREPARE_SNAPSHOT,
+ this);
+ LOGV("%s: X", __func__);
+}
+
+bool QCameraHardwareInterface::isNoDisplayMode()
+{
+ return (mNoDisplayMode != 0);
+}
+
+void QCameraHardwareInterface::pausePreviewForZSL()
+{
+ if(mRestartPreview) {
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ startPreview2();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ mRestartPreview = false;
+ }
+}
+}; // namespace android
+
diff --git a/camera/QCamera/HAL/core/src/QCameraHWI_Mem.cpp b/camera/QCamera/HAL/core/src/QCameraHWI_Mem.cpp
new file mode 100644
index 0000000..0cb1100
--- /dev/null
+++ b/camera/QCamera/HAL/core/src/QCameraHWI_Mem.cpp
@@ -0,0 +1,402 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Mem"
+#include <utils/Log.h>
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+#include <binder/MemoryHeapPmem.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include <camera/CameraParameters.h>
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+
+#include "QCameraHWI_Mem.h"
+
+#define CAMERA_HAL_UNUSED(expr) do { (void)(expr); } while (0)
+
+/* QCameraHardwareInterface class implementation goes here*/
+/* following code implement the control logic of this class*/
+
+namespace android {
+
+
+static bool register_buf(int size,
+ int frame_size,
+ int cbcr_offset,
+ int yoffset,
+ int pmempreviewfd,
+ uint32_t offset,
+ uint8_t *buf,
+ int pmem_type,
+ bool vfe_can_write,
+ bool register_buffer = true);
+
+#if 0
+MMCameraDL::MMCameraDL(){
+ LOGV("MMCameraDL: E");
+ libmmcamera = NULL;
+#if DLOPEN_LIBMMCAMERA
+ libmmcamera = ::dlopen("liboemcamera.so", RTLD_NOW);
+#endif
+ LOGV("Open MM camera DL libeomcamera loaded at %p ", libmmcamera);
+ LOGV("MMCameraDL: X");
+}
+
+void * MMCameraDL::pointer(){
+ return libmmcamera;
+}
+
+MMCameraDL::~MMCameraDL(){
+ LOGV("~MMCameraDL: E");
+ LINK_mm_camera_destroy();
+ if (libmmcamera != NULL) {
+ ::dlclose(libmmcamera);
+ LOGV("closed MM Camera DL ");
+ }
+ libmmcamera = NULL;
+ LOGV("~MMCameraDL: X");
+}
+
+
+wp<MMCameraDL> MMCameraDL::instance;
+Mutex MMCameraDL::singletonLock;
+
+
+sp<MMCameraDL> MMCameraDL::getInstance(){
+ Mutex::Autolock instanceLock(singletonLock);
+ sp<MMCameraDL> mmCamera = instance.promote();
+ if(mmCamera == NULL){
+ mmCamera = new MMCameraDL();
+ instance = mmCamera;
+ }
+ return mmCamera;
+}
+#endif
+
+MemPool::MemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name) :
+ mBufferSize(buffer_size),
+ mNumBuffers(num_buffers),
+ mFrameSize(frame_size),
+ mBuffers(NULL), mName(name)
+{
+ int page_size_minus_1 = getpagesize() - 1;
+ mAlignedBufferSize = (buffer_size + page_size_minus_1) & (~page_size_minus_1);
+}
+
+void MemPool::completeInitialization()
+{
+ // If we do not know how big the frame will be, we wait to allocate
+ // the buffers describing the individual frames until we do know their
+ // size.
+
+ if (mFrameSize > 0) {
+ mBuffers = new sp<MemoryBase>[mNumBuffers];
+ for (int i = 0; i < mNumBuffers; i++) {
+ mBuffers[i] = new
+ MemoryBase(mHeap,
+ i * mAlignedBufferSize,
+ mFrameSize);
+ }
+ }
+}
+
+AshmemPool::AshmemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name) :
+ MemPool(buffer_size,
+ num_buffers,
+ frame_size,
+ name)
+{
+ LOGV("constructing MemPool %s backed by ashmem: "
+ "%d frames @ %d uint8_ts, "
+ "buffer size %d",
+ mName,
+ num_buffers, frame_size, buffer_size);
+
+ int page_mask = getpagesize() - 1;
+ int ashmem_size = buffer_size * num_buffers;
+ ashmem_size += page_mask;
+ ashmem_size &= ~page_mask;
+
+ mHeap = new MemoryHeapBase(ashmem_size);
+
+ completeInitialization();
+}
+
+static bool register_buf(int size,
+ int frame_size,
+ int cbcr_offset,
+ int yoffset,
+ int pmempreviewfd,
+ uint32_t offset,
+ uint8_t *buf,
+ int pmem_type,
+ bool vfe_can_write,
+ bool register_buffer)
+{
+ struct msm_pmem_info pmemBuf;
+ CAMERA_HAL_UNUSED(frame_size);
+
+ pmemBuf.type = pmem_type;
+ pmemBuf.fd = pmempreviewfd;
+ pmemBuf.offset = offset;
+ pmemBuf.len = size;
+ pmemBuf.vaddr = buf;
+ pmemBuf.y_off = yoffset;
+ pmemBuf.cbcr_off = cbcr_offset;
+
+ pmemBuf.active = vfe_can_write;
+
+ LOGV("register_buf: reg = %d buffer = %p",
+ !register_buffer, buf);
+ /*TODO*/
+ /*if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+ CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf) < 0) {
+ LOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM error %s",
+ strerror(errno));
+ return false;
+ }*/
+
+ return true;
+
+}
+
+#if 0
+bool register_record_buffers(bool register_buffer) {
+ LOGI("%s: (%d) E", __FUNCTION__, register_buffer);
+ struct msm_pmem_info pmemBuf;
+
+ for (int cnt = 0; cnt < VIDEO_BUFFER_COUNT; ++cnt) {
+ pmemBuf.type = MSM_PMEM_VIDEO;
+ pmemBuf.fd = mRecordHeap->mHeap->getHeapID();
+ pmemBuf.offset = mRecordHeap->mAlignedBufferSize * cnt;
+ pmemBuf.len = mRecordHeap->mBufferSize;
+ pmemBuf.vaddr = (uint8_t *)mRecordHeap->mHeap->base() + mRecordHeap->mAlignedBufferSize * cnt;
+ pmemBuf.y_off = 0;
+ pmemBuf.cbcr_off = recordframes[0].cbcr_off;
+ if(register_buffer == true) {
+ pmemBuf.active = (cnt<ACTIVE_VIDEO_BUFFERS);
+ if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+ pmemBuf.type = MSM_PMEM_VIDEO_VPE;
+ pmemBuf.active = 1;
+ }
+ } else {
+ pmemBuf.active = false;
+ }
+
+ LOGV("register_buf: reg = %d buffer = %p", !register_buffer,
+ (void *)pmemBuf.vaddr);
+ if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+ CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf) < 0) {
+ LOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM error %s",
+ strerror(errno));
+ return false;
+ }
+ }
+ return true;
+}
+#endif
+PmemPool::PmemPool(const char *pmem_pool,
+ int flags,
+ int pmem_type,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yOffset, const char *name) :
+ MemPool(buffer_size,num_buffers,frame_size,name),
+ mPmemType(pmem_type),
+ mCbCrOffset(cbcr_offset),
+ myOffset(yOffset)
+{
+ LOGI("constructing MemPool %s backed by pmem pool %s: "
+ "%d frames @ %d bytes, buffer size %d",
+ mName,
+ pmem_pool, num_buffers, frame_size,
+ buffer_size);
+
+ //mMMCameraDLRef = MMCameraDL::getInstance();
+
+
+ // Make a new mmap'ed heap that can be shared across processes.
+ // mAlignedBufferSize is already in 4k aligned. (do we need total size necessary to be in power of 2??)
+ mAlignedSize = mAlignedBufferSize * num_buffers;
+
+ sp<MemoryHeapBase> masterHeap =
+ new MemoryHeapBase(pmem_pool, mAlignedSize, flags);
+
+ if (masterHeap->getHeapID() < 0) {
+ LOGE("failed to construct master heap for pmem pool %s", pmem_pool);
+ masterHeap.clear();
+ return;
+ }
+
+ sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(masterHeap, flags);
+ if (pmemHeap->getHeapID() >= 0) {
+ pmemHeap->slap();
+ masterHeap.clear();
+ mHeap = pmemHeap;
+ pmemHeap.clear();
+
+ mFd = mHeap->getHeapID();
+ if (::ioctl(mFd, PMEM_GET_SIZE, &mSize)) {
+ LOGE("pmem pool %s ioctl(PMEM_GET_SIZE) error %s (%d)",
+ pmem_pool,
+ ::strerror(errno), errno);
+ mHeap.clear();
+ return;
+ }
+
+ LOGE("pmem pool %s ioctl(fd = %d, PMEM_GET_SIZE) is %ld",
+ pmem_pool,
+ mFd,
+ mSize.len);
+ LOGE("mBufferSize=%d, mAlignedBufferSize=%d\n", mBufferSize, mAlignedBufferSize);
+
+#if 0
+ // Unregister preview buffers with the camera drivers. Allow the VFE to write
+ // to all preview buffers except for the last one.
+ // Only Register the preview, snapshot and thumbnail buffers with the kernel.
+ if( (strcmp("postview", mName) != 0) ){
+ int num_buf = num_buffers;
+ if(!strcmp("preview", mName)) num_buf = kPreviewBufferCount;
+ LOGD("num_buffers = %d", num_buf);
+ for (int cnt = 0; cnt < num_buf; ++cnt) {
+ int active = 1;
+ if(pmem_type == MSM_PMEM_VIDEO){
+ active = (cnt<ACTIVE_VIDEO_BUFFERS);
+ //When VPE is enabled, set the last record
+ //buffer as active and pmem type as PMEM_VIDEO_VPE
+ //as this is a requirement from VPE operation.
+ //No need to set this pmem type to VIDEO_VPE while unregistering,
+ //because as per camera stack design: "the VPE AXI is also configured
+ //when VFE is configured for VIDEO, which is as part of preview
+ //initialization/start. So during this VPE AXI config camera stack
+ //will lookup the PMEM_VIDEO_VPE buffer and give it as o/p of VPE and
+ //change it's type to PMEM_VIDEO".
+ if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+ active = 1;
+ pmem_type = MSM_PMEM_VIDEO_VPE;
+ }
+ LOGV(" pmempool creating video buffers : active %d ", active);
+ }
+ else if (pmem_type == MSM_PMEM_PREVIEW){
+ active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+ }
+ else if ((pmem_type == MSM_PMEM_MAINIMG)
+ || (pmem_type == MSM_PMEM_THUMBNAIL)){
+ active = (cnt < ACTIVE_ZSL_BUFFERS);
+ }
+ register_buf(mBufferSize,
+ mFrameSize, mCbCrOffset, myOffset,
+ mHeap->getHeapID(),
+ mAlignedBufferSize * cnt,
+ (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+ pmem_type,
+ active);
+ }
+ }
+#endif
+ completeInitialization();
+ }
+ else LOGE("pmem pool %s error: could not create master heap!",
+ pmem_pool);
+ LOGI("%s: (%s) X ", __FUNCTION__, mName);
+}
+
+PmemPool::~PmemPool()
+{
+ LOGI("%s: %s E", __FUNCTION__, mName);
+#if 0
+ if (mHeap != NULL) {
+ // Unregister preview buffers with the camera drivers.
+ // Only Unregister the preview, snapshot and thumbnail
+ // buffers with the kernel.
+ if( (strcmp("postview", mName) != 0) ){
+ int num_buffers = mNumBuffers;
+ if(!strcmp("preview", mName)) num_buffers = PREVIEW_BUFFER_COUNT;
+ for (int cnt = 0; cnt < num_buffers; ++cnt) {
+ register_buf(mBufferSize,
+ mFrameSize,
+ mCbCrOffset,
+ myOffset,
+ mHeap->getHeapID(),
+ mAlignedBufferSize * cnt,
+ (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+ mPmemType,
+ false,
+ false /* unregister */);
+ }
+ }
+ }
+ mMMCameraDLRef.clear();
+#endif
+ LOGI("%s: %s X", __FUNCTION__, mName);
+}
+MemPool::~MemPool()
+{
+ LOGV("destroying MemPool %s", mName);
+ if (mFrameSize > 0)
+ delete [] mBuffers;
+ mHeap.clear();
+ LOGV("destroying MemPool %s completed", mName);
+}
+
+
+status_t MemPool::dump(int fd, const Vector<String16>& args) const
+{
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ String8 result;
+ CAMERA_HAL_UNUSED(args);
+ snprintf(buffer, 255, "QualcommCameraHardware::AshmemPool::dump\n");
+ result.append(buffer);
+ if (mName) {
+ snprintf(buffer, 255, "mem pool name (%s)\n", mName);
+ result.append(buffer);
+ }
+ if (mHeap != 0) {
+ snprintf(buffer, 255, "heap base(%p), size(%d), flags(%d), device(%s)\n",
+ mHeap->getBase(), mHeap->getSize(),
+ mHeap->getFlags(), mHeap->getDevice());
+ result.append(buffer);
+ }
+ snprintf(buffer, 255,
+ "buffer size (%d), number of buffers (%d), frame size(%d)",
+ mBufferSize, mNumBuffers, mFrameSize);
+ result.append(buffer);
+ write(fd, result.string(), result.size());
+ return NO_ERROR;
+}
+
+};
diff --git a/camera/QCamera/HAL/core/src/QCameraHWI_Parm.cpp b/camera/QCamera/HAL/core/src/QCameraHWI_Parm.cpp
new file mode 100644
index 0000000..b4381c1
--- /dev/null
+++ b/camera/QCamera/HAL/core/src/QCameraHWI_Parm.cpp
@@ -0,0 +1,4071 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Parm"
+#include <utils/Log.h>
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+#include <binder/MemoryHeapPmem.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include <camera/CameraParameters.h>
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+
+#include "linux/msm_mdp.h"
+#include <linux/fb.h>
+#include <limits>
+
+
+extern "C" {
+#include <fcntl.h>
+#include <time.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <termios.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <signal.h>
+#include <errno.h>
+#include <sys/mman.h>
+#include <sys/system_properties.h>
+#include <sys/time.h>
+#include <stdlib.h>
+#include <linux/msm_ion.h>
+#include <cam_fifo.h>
+#include <jpege.h>
+
+} // extern "C"
+
+#include "QCameraHWI.h"
+
+/* QCameraHardwareInterface class implementation goes here*/
+/* following code implements the parameter logic of this class*/
+#define EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR 12
+#define EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR -12
+#define EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR 0
+#define EXPOSURE_COMPENSATION_DENOMINATOR 6
+#define EXPOSURE_COMPENSATION_STEP ((float (1))/EXPOSURE_COMPENSATION_DENOMINATOR)
+#define DEFAULT_CAMERA_AREA "(0, 0, 0, 0, 0)"
+
+#define HDR_HAL_FRAME 2
+
+#define BURST_INTREVAL_MIN 1
+#define BURST_INTREVAL_MAX 10
+#define BURST_INTREVAL_DEFAULT 1
+
+//Default FPS
+#define MINIMUM_FPS 5
+#define MAXIMUM_FPS 120
+#define DEFAULT_FIXED_FPS 30
+#define DEFAULT_FPS MAXIMUM_FPS
+
+//Default Picture Width
+#define DEFAULT_PICTURE_WIDTH 640
+#define DEFAULT_PICTURE_HEIGHT 480
+
+//Default Video Width
+#define DEFAULT_VIDEO_WIDTH 1920
+#define DEFAULT_VIDEO_HEIGHT 1088
+
+#define THUMBNAIL_SIZE_COUNT (sizeof(thumbnail_sizes)/sizeof(thumbnail_size_type))
+#define DEFAULT_THUMBNAIL_SETTING 4
+#define THUMBNAIL_WIDTH_STR "512"
+#define THUMBNAIL_HEIGHT_STR "384"
+#define THUMBNAIL_SMALL_HEIGHT 144
+
+#define DONT_CARE_COORDINATE -1
+
+//for histogram stats
+#define HISTOGRAM_STATS_SIZE 257
+
+//Supported preview fps ranges should be added to this array in the form (minFps,maxFps)
+static android::FPSRange FpsRangesSupported[] = {
+ android::FPSRange(MINIMUM_FPS*1000,MAXIMUM_FPS*1000)
+ };
+#define FPS_RANGES_SUPPORTED_COUNT (sizeof(FpsRangesSupported)/sizeof(FpsRangesSupported[0]))
+
+
+typedef struct {
+ uint32_t aspect_ratio;
+ uint32_t width;
+ uint32_t height;
+} thumbnail_size_type;
+
+static thumbnail_size_type thumbnail_sizes[] = {
+{ 7281, 512, 288 }, //1.777778
+{ 6826, 480, 288 }, //1.666667
+{ 6808, 256, 154 }, //1.66233
+{ 6144, 432, 288 }, //1.5
+{ 5461, 512, 384 }, //1.333333
+{ 5006, 352, 288 }, //1.222222
+{ 5461, 320, 240 }, //1.33333
+{ 5006, 176, 144 }, //1.222222
+
+};
+
+static struct camera_size_type zsl_picture_sizes[] = {
+ { 1024, 768}, // 1MP XGA
+ { 800, 600}, //SVGA
+ { 800, 480}, // WVGA
+ { 640, 480}, // VGA
+ { 352, 288}, //CIF
+ { 320, 240}, // QVGA
+ { 176, 144} // QCIF
+};
+
+static camera_size_type default_picture_sizes[] = {
+ { 4000, 3000}, // 12MP
+ { 3200, 2400}, // 8MP
+ { 2592, 1944}, // 5MP
+ { 2048, 1536}, // 3MP QXGA
+ { 1920, 1088}, //HD1080
+ { 1600, 1200}, // 2MP UXGA
+ { 1280, 768}, //WXGA
+ { 1280, 720}, //HD720
+ { 1024, 768}, // 1MP XGA
+ { 800, 600}, //SVGA
+ { 800, 480}, // WVGA
+ { 640, 480}, // VGA
+ { 352, 288}, //CIF
+ { 320, 240}, // QVGA
+ { 176, 144} // QCIF
+};
+
+static int iso_speed_values[] = {
+ 0, 1, 100, 200, 400, 800, 1600
+};
+
+extern int HAL_numOfCameras;
+extern qcamera_info_t HAL_cameraInfo[MSM_MAX_CAMERA_SENSORS];
+extern mm_camera_t * HAL_camerahandle[MSM_MAX_CAMERA_SENSORS];
+
+namespace android {
+
+static uint32_t HFR_SIZE_COUNT=2;
+static const int PICTURE_FORMAT_JPEG = 1;
+static const int PICTURE_FORMAT_RAW = 2;
+
+/********************************************************************/
+static const str_map effects[] = {
+ { CameraParameters::EFFECT_NONE, CAMERA_EFFECT_OFF },
+ { CameraParameters::EFFECT_MONO, CAMERA_EFFECT_MONO },
+ { CameraParameters::EFFECT_NEGATIVE, CAMERA_EFFECT_NEGATIVE },
+ { CameraParameters::EFFECT_SOLARIZE, CAMERA_EFFECT_SOLARIZE },
+ { CameraParameters::EFFECT_SEPIA, CAMERA_EFFECT_SEPIA },
+ { CameraParameters::EFFECT_POSTERIZE, CAMERA_EFFECT_POSTERIZE },
+ { CameraParameters::EFFECT_WHITEBOARD, CAMERA_EFFECT_WHITEBOARD },
+ { CameraParameters::EFFECT_BLACKBOARD, CAMERA_EFFECT_BLACKBOARD },
+ { CameraParameters::EFFECT_AQUA, CAMERA_EFFECT_AQUA },
+ { CameraParameters::EFFECT_EMBOSS, CAMERA_EFFECT_EMBOSS },
+ { CameraParameters::EFFECT_SKETCH, CAMERA_EFFECT_SKETCH },
+ { CameraParameters::EFFECT_NEON, CAMERA_EFFECT_NEON }
+};
+
+static const str_map iso[] = {
+ { CameraParameters::ISO_AUTO, CAMERA_ISO_AUTO},
+ { CameraParameters::ISO_HJR, CAMERA_ISO_DEBLUR},
+ { CameraParameters::ISO_100, CAMERA_ISO_100},
+ { CameraParameters::ISO_200, CAMERA_ISO_200},
+ { CameraParameters::ISO_400, CAMERA_ISO_400},
+ { CameraParameters::ISO_800, CAMERA_ISO_800 },
+ { CameraParameters::ISO_1600, CAMERA_ISO_1600 }
+};
+
+static const str_map scenemode[] = {
+ { CameraParameters::SCENE_MODE_AUTO, CAMERA_BESTSHOT_OFF },
+ { CameraParameters::SCENE_MODE_ASD, CAMERA_BESTSHOT_AUTO },
+ { CameraParameters::SCENE_MODE_ACTION, CAMERA_BESTSHOT_ACTION },
+ { CameraParameters::SCENE_MODE_PORTRAIT, CAMERA_BESTSHOT_PORTRAIT },
+ { CameraParameters::SCENE_MODE_LANDSCAPE, CAMERA_BESTSHOT_LANDSCAPE },
+ { CameraParameters::SCENE_MODE_NIGHT, CAMERA_BESTSHOT_NIGHT },
+ { CameraParameters::SCENE_MODE_NIGHT_PORTRAIT, CAMERA_BESTSHOT_NIGHT_PORTRAIT },
+ { CameraParameters::SCENE_MODE_THEATRE, CAMERA_BESTSHOT_THEATRE },
+ { CameraParameters::SCENE_MODE_BEACH, CAMERA_BESTSHOT_BEACH },
+ { CameraParameters::SCENE_MODE_SNOW, CAMERA_BESTSHOT_SNOW },
+ { CameraParameters::SCENE_MODE_SUNSET, CAMERA_BESTSHOT_SUNSET },
+ { CameraParameters::SCENE_MODE_STEADYPHOTO, CAMERA_BESTSHOT_ANTISHAKE },
+ { CameraParameters::SCENE_MODE_FIREWORKS , CAMERA_BESTSHOT_FIREWORKS },
+ { CameraParameters::SCENE_MODE_SPORTS , CAMERA_BESTSHOT_SPORTS },
+ { CameraParameters::SCENE_MODE_PARTY, CAMERA_BESTSHOT_PARTY },
+ { CameraParameters::SCENE_MODE_CANDLELIGHT, CAMERA_BESTSHOT_CANDLELIGHT },
+ { CameraParameters::SCENE_MODE_BACKLIGHT, CAMERA_BESTSHOT_BACKLIGHT },
+ { CameraParameters::SCENE_MODE_FLOWERS, CAMERA_BESTSHOT_FLOWERS },
+ { CameraParameters::SCENE_MODE_AR, CAMERA_BESTSHOT_AR },
+};
+
+static const str_map scenedetect[] = {
+ { CameraParameters::SCENE_DETECT_OFF, FALSE },
+ { CameraParameters::SCENE_DETECT_ON, TRUE },
+};
+
+#define DONT_CARE AF_MODE_MAX
+static const str_map focus_modes[] = {
+ { CameraParameters::FOCUS_MODE_AUTO, AF_MODE_AUTO},
+ { CameraParameters::FOCUS_MODE_INFINITY, AF_MODE_INFINITY },
+ { CameraParameters::FOCUS_MODE_NORMAL, AF_MODE_NORMAL },
+ { CameraParameters::FOCUS_MODE_MACRO, AF_MODE_MACRO },
+ { CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, AF_MODE_CAF},
+ { CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, AF_MODE_CAF }
+};
+
+static const str_map selectable_zone_af[] = {
+ { CameraParameters::SELECTABLE_ZONE_AF_AUTO, AUTO },
+ { CameraParameters::SELECTABLE_ZONE_AF_SPOT_METERING, SPOT },
+ { CameraParameters::SELECTABLE_ZONE_AF_CENTER_WEIGHTED, CENTER_WEIGHTED },
+ { CameraParameters::SELECTABLE_ZONE_AF_FRAME_AVERAGE, AVERAGE }
+};
+
+static const str_map autoexposure[] = {
+ { CameraParameters::AUTO_EXPOSURE_FRAME_AVG, CAMERA_AEC_FRAME_AVERAGE },
+ { CameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED, CAMERA_AEC_CENTER_WEIGHTED },
+ { CameraParameters::AUTO_EXPOSURE_SPOT_METERING, CAMERA_AEC_SPOT_METERING }
+};
+
+// from aeecamera.h
+static const str_map whitebalance[] = {
+ { CameraParameters::WHITE_BALANCE_AUTO, CAMERA_WB_AUTO },
+ { CameraParameters::WHITE_BALANCE_INCANDESCENT, CAMERA_WB_INCANDESCENT },
+ { CameraParameters::WHITE_BALANCE_FLUORESCENT, CAMERA_WB_FLUORESCENT },
+ { CameraParameters::WHITE_BALANCE_DAYLIGHT, CAMERA_WB_DAYLIGHT },
+ { CameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, CAMERA_WB_CLOUDY_DAYLIGHT }
+};
+
+static const str_map antibanding[] = {
+ { CameraParameters::ANTIBANDING_OFF, CAMERA_ANTIBANDING_OFF },
+ { CameraParameters::ANTIBANDING_50HZ, CAMERA_ANTIBANDING_50HZ },
+ { CameraParameters::ANTIBANDING_60HZ, CAMERA_ANTIBANDING_60HZ },
+ { CameraParameters::ANTIBANDING_AUTO, CAMERA_ANTIBANDING_AUTO }
+};
+
+static const str_map frame_rate_modes[] = {
+ {CameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE, FPS_MODE_AUTO},
+ {CameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE, FPS_MODE_FIXED}
+};
+
+static const str_map touchafaec[] = {
+ { CameraParameters::TOUCH_AF_AEC_OFF, FALSE },
+ { CameraParameters::TOUCH_AF_AEC_ON, TRUE }
+};
+
+static const str_map hfr[] = {
+ { CameraParameters::VIDEO_HFR_OFF, CAMERA_HFR_MODE_OFF },
+ { CameraParameters::VIDEO_HFR_2X, CAMERA_HFR_MODE_60FPS },
+ { CameraParameters::VIDEO_HFR_3X, CAMERA_HFR_MODE_90FPS },
+ { CameraParameters::VIDEO_HFR_4X, CAMERA_HFR_MODE_120FPS },
+};
+static const int HFR_VALUES_COUNT = (sizeof(hfr)/sizeof(str_map));
+
+static const str_map flash[] = {
+ { CameraParameters::FLASH_MODE_OFF, LED_MODE_OFF },
+ { CameraParameters::FLASH_MODE_AUTO, LED_MODE_AUTO },
+ { CameraParameters::FLASH_MODE_ON, LED_MODE_ON },
+ { CameraParameters::FLASH_MODE_TORCH, LED_MODE_TORCH}
+};
+
+static const str_map lensshade[] = {
+ { CameraParameters::LENSSHADE_ENABLE, TRUE },
+ { CameraParameters::LENSSHADE_DISABLE, FALSE }
+};
+
+static const str_map mce[] = {
+ { CameraParameters::MCE_ENABLE, TRUE },
+ { CameraParameters::MCE_DISABLE, FALSE }
+};
+
+static const str_map histogram[] = {
+ { CameraParameters::HISTOGRAM_ENABLE, TRUE },
+ { CameraParameters::HISTOGRAM_DISABLE, FALSE }
+};
+
+static const str_map skinToneEnhancement[] = {
+ { CameraParameters::SKIN_TONE_ENHANCEMENT_ENABLE, TRUE },
+ { CameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE, FALSE }
+};
+
+static const str_map denoise[] = {
+ { CameraParameters::DENOISE_OFF, FALSE },
+ { CameraParameters::DENOISE_ON, TRUE }
+};
+
+static const str_map facedetection[] = {
+ { CameraParameters::FACE_DETECTION_OFF, FALSE },
+ { CameraParameters::FACE_DETECTION_ON, TRUE }
+};
+
+static const str_map redeye_reduction[] = {
+ { CameraParameters::REDEYE_REDUCTION_ENABLE, TRUE },
+ { CameraParameters::REDEYE_REDUCTION_DISABLE, FALSE }
+};
+
+static const str_map picture_formats[] = {
+ {CameraParameters::PIXEL_FORMAT_JPEG, PICTURE_FORMAT_JPEG},
+ {CameraParameters::PIXEL_FORMAT_RAW, PICTURE_FORMAT_RAW}
+};
+
+static const str_map recording_Hints[] = {
+ {"false", FALSE},
+ {"true", TRUE}
+};
+
+static const str_map preview_formats[] = {
+ {CameraParameters::PIXEL_FORMAT_YUV420SP, HAL_PIXEL_FORMAT_YCrCb_420_SP},
+ {CameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO, HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO},
+ {CameraParameters::PIXEL_FORMAT_YV12, HAL_PIXEL_FORMAT_YV12},
+ {CameraParameters::PIXEL_FORMAT_YUV420P,HAL_PIXEL_FORMAT_YV12},
+ {CameraParameters::PIXEL_FORMAT_NV12, HAL_PIXEL_FORMAT_YCbCr_420_SP}
+};
+
+static const preview_format_info_t preview_format_info_list[] = {
+ {HAL_PIXEL_FORMAT_YCrCb_420_SP, CAMERA_YUV_420_NV21, CAMERA_PAD_TO_WORD, 2},
+ {HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO, CAMERA_YUV_420_NV21, CAMERA_PAD_TO_4K, 2},
+ {HAL_PIXEL_FORMAT_YCbCr_420_SP, CAMERA_YUV_420_NV12, CAMERA_PAD_TO_WORD, 2},
+ {HAL_PIXEL_FORMAT_YV12, CAMERA_YUV_420_YV12, CAMERA_PAD_TO_WORD, 3}
+};
+
+static const str_map zsl_modes[] = {
+ { CameraParameters::ZSL_OFF, FALSE },
+ { CameraParameters::ZSL_ON, TRUE },
+};
+
+
+static const str_map hdr_bracket[] = {
+ { CameraParameters::AE_BRACKET_HDR_OFF,HDR_BRACKETING_OFF},
+ { CameraParameters::AE_BRACKET_HDR,HDR_MODE },
+ { CameraParameters::AE_BRACKET,EXP_BRACKETING_MODE }
+};
+
+typedef enum {
+ NORMAL_POWER,
+ LOW_POWER
+} power_mode;
+
+static const str_map power_modes[] = {
+ { CameraParameters::NORMAL_POWER,NORMAL_POWER },
+ { CameraParameters::LOW_POWER,LOW_POWER }
+};
+
+/**************************************************************************/
+static int attr_lookup(const str_map arr[], int len, const char *name)
+{
+ if (name) {
+ for (int i = 0; i < len; i++) {
+ if (!strcmp(arr[i].desc, name))
+ return arr[i].val;
+ }
+ }
+ return NOT_FOUND;
+}
+
+bool QCameraHardwareInterface::native_set_parms(
+ mm_camera_parm_type_t type, uint16_t length, void *value)
+{
+ LOGE("%s : type : %d Value : %d",__func__,type,*((int *)value));
+ if(MM_CAMERA_OK != cam_config_set_parm(mCameraId, type,value )) {
+ LOGE("native_set_parms failed: type %d length %d error %s",
+ type, length, strerror(errno));
+ return false;
+ }
+
+ return true;
+
+}
+
+bool QCameraHardwareInterface::native_set_parms(
+ mm_camera_parm_type_t type, uint16_t length, void *value, int *result)
+{
+ *result= cam_config_set_parm(mCameraId, type,value );
+ if(MM_CAMERA_OK == *result) {
+ LOGE("native_set_parms: succeeded : %d", *result);
+ return true;
+ }
+
+ LOGE("native_set_parms failed: type %d length %d error str %s error# %d",
+ type, length, strerror(errno), errno);
+ return false;
+}
+
+//Filter Picture sizes based on max width and height
+/* TBD: do we still need this - except for ZSL? */
+// Computes maxSnapshotWidth/maxSnapshotHeight from mPictureSizes and
+// selects the active picture-size table (ZSL uses a reduced fixed table).
+// No-op when mPictureSizeCount is 0.
+void QCameraHardwareInterface::filterPictureSizes(){
+ unsigned int i;
+ if(mPictureSizeCount <= 0)
+ return;
+ maxSnapshotWidth = mPictureSizes[0].width;
+ maxSnapshotHeight = mPictureSizes[0].height;
+ // Iterate through all the width and height to find the max value
+ // NOTE(review): this only updates the max when BOTH width is strictly
+ // larger AND height is >= the current max -- presumably fine for the
+ // aspect ratios in the table, but it is not a true per-axis maximum;
+ // confirm the table ordering makes this safe.
+ for(i =0; i<mPictureSizeCount;i++){
+ if(((maxSnapshotWidth < mPictureSizes[i].width) &&
+ (maxSnapshotHeight <= mPictureSizes[i].height))){
+ maxSnapshotWidth = mPictureSizes[i].width;
+ maxSnapshotHeight = mPictureSizes[i].height;
+ }
+ }
+ if(myMode & CAMERA_ZSL_MODE){
+ // due to lack of PMEM we restrict to lower resolution
+ // NOTE(review): hard-coded 7 must match the length of
+ // zsl_picture_sizes -- verify if that table changes.
+ mPictureSizesPtr = zsl_picture_sizes;
+ mSupportedPictureSizesCount = 7;
+ }else{
+ mPictureSizesPtr = mPictureSizes;
+ mSupportedPictureSizesCount = mPictureSizeCount;
+ }
+}
+
+static String8 create_sizes_str(const camera_size_type *sizes, int len) {
+    // Render a size table in the comma-separated "WxH,WxH,..." form used
+    // by CameraParameters value strings; empty string when len <= 0.
+    String8 out;
+    char item[32];
+
+    for (int i = 0; i < len; i++) {
+        snprintf(item, sizeof(item), (i == 0) ? "%dx%d" : ",%dx%d",
+                 sizes[i].width, sizes[i].height);
+        out.append(item);
+    }
+    return out;
+}
+
+String8 QCameraHardwareInterface::create_values_str(const str_map *values, int len) {
+    // Join the desc fields of a str_map table with commas; empty string
+    // when len <= 0.
+    String8 joined;
+
+    for (int i = 0; i < len; i++) {
+        if (i)
+            joined.append(",");
+        joined.append(values[i].desc);
+    }
+    return joined;
+}
+
+static String8 create_fps_str(const android:: FPSRange* fps, int len) {
+    // Render FPS ranges as "(min,max),(min,max),..." for the
+    // supported-preview-fps-range parameter; empty string when len <= 0.
+    String8 out;
+    char item[32];
+
+    for (int i = 0; i < len; i++) {
+        snprintf(item, sizeof(item), (i == 0) ? "(%d,%d)" : ",(%d,%d)",
+                 fps[i].minFPS, fps[i].maxFPS);
+        out.append(item);
+    }
+    return out;
+}
+
+static String8 create_values_range_str(int min, int max){
+    // Comma-separated list of every integer in [min, max]; empty string
+    // when min > max.
+    String8 range;
+    char num[32];
+
+    for (int v = min; v <= max; v++) {
+        snprintf(num, sizeof(num), (v == min) ? "%d" : ",%d", v);
+        range.append(num);
+    }
+    return range;
+}
+
+static int parse_size(const char *str, int &width, int &height)
+{
+    // Parse a "<width>x<height>" (or "<width>X<height>") string.
+    // On success stores both values and returns 0; returns -1 (outputs
+    // untouched) when the separator is missing.
+    char *sep;
+    long w = strtol(str, &sep, 10);
+
+    if ((*sep != 'x') && (*sep != 'X'))
+        return -1;
+
+    long h = strtol(sep + 1, 0, 10);
+
+    width = (int)w;
+    height = (int)h;
+    return 0;
+}
+
+// Validates a requested picture resolution; see conditions in the
+// comment block below. Returns TRUE only when all three hold.
+bool QCameraHardwareInterface::isValidDimension(int width, int height) {
+ bool retVal = FALSE;
+ /* This function checks if a given resolution is valid or not.
+ * A particular resolution is considered valid if it satisfies
+ * the following conditions:
+ * 1. width & height should be multiple of 16.
+ * 2. width & height should be less than/equal to the dimensions
+ * supported by the camera sensor.
+ * 3. the aspect ratio is a valid aspect ratio and is among the
+ * commonly used aspect ratio as determined by the thumbnail_sizes
+ * data structure.
+ */
+
+ // CEILING16(x) == x iff x is already 16-aligned; max* bounds come from
+ // filterPictureSizes()/the sensor query.
+ if( (width == CEILING16(width)) && (height == CEILING16(height))
+ && (width <= maxSnapshotWidth)
+ && (height <= maxSnapshotHeight) )
+ {
+ // Aspect ratio compared in Q12 fixed point against the thumbnail
+ // table's precomputed ratios.
+ uint32_t pictureAspectRatio = (uint32_t)((width * Q12)/height);
+ for(uint32_t i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ) {
+ if(thumbnail_sizes[i].aspect_ratio == pictureAspectRatio) {
+ retVal = TRUE;
+ break;
+ }
+ }
+ }
+ return retVal;
+}
+
+void QCameraHardwareInterface::hasAutoFocusSupport(){
+    // Caches autofocus availability in mHasAutoFocusSupport: always false
+    // in ZSL mode, otherwise true iff the driver reports the FOCUS op.
+    LOGV("%s",__func__);
+
+    if (isZSLMode()) {
+        mHasAutoFocusSupport = false;
+        return;
+    }
+
+    if (!cam_ops_is_op_supported(mCameraId, MM_CAMERA_OPS_FOCUS)) {
+        LOGE("AutoFocus is not supported");
+        mHasAutoFocusSupport = false;
+    } else {
+        mHasAutoFocusSupport = true;
+    }
+
+    LOGV("%s:rc= %d",__func__, mHasAutoFocusSupport);
+}
+
+bool QCameraHardwareInterface::supportsSceneDetection() {
+    // True when the driver advertises ASD (auto scene detection).
+    return cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ASD_ENABLE);
+}
+
+bool QCameraHardwareInterface::supportsFaceDetection() {
+    // True when the driver advertises the face-detection parameter.
+    return cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FD);
+}
+
+bool QCameraHardwareInterface::supportsSelectableZoneAf() {
+    // True when the driver supports AF on a selectable focus rectangle.
+    return cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FOCUS_RECT);
+}
+
+bool QCameraHardwareInterface::supportsRedEyeReduction() {
+    // True when the driver advertises red-eye reduction.
+    return cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_REDEYE_REDUCTION);
+}
+
+static String8 create_str(int16_t *arr, int length){
+    // Comma-join an int16 array as "v0,v1,..."; empty string when
+    // length <= 0. Used e.g. for the zoom-ratio table.
+    String8 joined;
+    char num[32];
+
+    for (int i = 0; i < length; i++) {
+        snprintf(num, sizeof(num), (i == 0) ? "%d" : ",%d", arr[i]);
+        joined.append(num);
+    }
+    return joined;
+}
+
+// Queries the driver's maximum picture size and maps it to the first
+// fitting entry of mPictureSizes, written into *maxDim.
+// NOTE(review): the return value is declared bool but actually carries a
+// driver status; the "ret != NO_ERROR" comparisons only work because the
+// success codes are 0 -- confirm before changing either constant.
+bool QCameraHardwareInterface::getMaxPictureDimension(mm_camera_dimension_t *maxDim)
+{
+ bool ret = NO_ERROR;
+ mm_camera_dimension_t dim;
+
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_MAX_PICTURE_SIZE, &dim);
+ if (ret != NO_ERROR)
+ return ret;
+
+ /* Find the first dimension in the mPictureSizes
+ * array which is smaller than the max dimension.
+ * This will be the valid max picture resolution */
+ // NOTE(review): "first smaller" only yields the maximum if
+ // mPictureSizes is ordered largest-first -- presumably guaranteed by
+ // the table producer; verify. If no entry fits, *maxDim is left as the
+ // caller initialized it.
+ for (unsigned int i = 0; i < mPictureSizeCount; i++) {
+ if ((mPictureSizes[i].width <= dim.width) &&
+ (mPictureSizes[i].height <= dim.height)) {
+ maxDim->height = mPictureSizes[i].height;
+ maxDim->width = mPictureSizes[i].width;
+ break;
+ }
+ }
+ LOGD("%s: Found Max Picture dimension: %d x %d", __func__,
+ maxDim->width, maxDim->height);
+ return ret;
+}
+// Loads the driver's default preview/video/thumbnail/HFR size tables and
+// their counts into the corresponding module-level arrays/counters.
+// NOTE(review): the *_CNT query results assigned to |ret| are never
+// checked -- if a count query fails, the following table fetch uses a
+// stale/zero count. Confirm whether that is acceptable here.
+void QCameraHardwareInterface::loadTables()
+{
+
+ bool ret = NO_ERROR;
+ LOGE("%s: E", __func__);
+
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_PREVIEW_SIZES_CNT, &preview_sizes_count);
+
+ default_sizes_tbl_t preview_sizes_tbl;
+ preview_sizes_tbl.tbl_size=preview_sizes_count;
+ preview_sizes_tbl.sizes_tbl=&default_preview_sizes[0];
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_DEF_PREVIEW_SIZES, &preview_sizes_tbl)){
+ LOGE("%s:Failed to get default preview sizes",__func__);
+ }
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_VIDEO_SIZES_CNT, &video_sizes_count);
+
+ default_sizes_tbl_t video_sizes_tbl;
+ video_sizes_tbl.tbl_size=video_sizes_count;
+ video_sizes_tbl.sizes_tbl=&default_video_sizes[0];
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_DEF_VIDEO_SIZES, &video_sizes_tbl)){
+ LOGE("%s:Failed to get default video sizes",__func__);
+ }
+
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_THUMB_SIZES_CNT, &thumbnail_sizes_count);
+
+ default_sizes_tbl_t thumbnail_sizes_tbl;
+ thumbnail_sizes_tbl.tbl_size=thumbnail_sizes_count;
+ thumbnail_sizes_tbl.sizes_tbl=&default_thumbnail_sizes[0];
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_DEF_THUMB_SIZES, &thumbnail_sizes_tbl)){
+ LOGE("%s:Failed to get default thumbnail sizes",__func__);
+ }
+
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_HFR_SIZES_CNT, &hfr_sizes_count);
+
+ default_sizes_tbl_t hfr_sizes_tbl;
+ hfr_sizes_tbl.tbl_size=hfr_sizes_count;
+ hfr_sizes_tbl.sizes_tbl=&default_hfr_sizes[0];
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_DEF_HFR_SIZES, &hfr_sizes_tbl)){
+ LOGE("%s:Failed to get default HFR sizes",__func__);
+ }
+ LOGE("%s: X", __func__);
+}
+/* Builds the complete default CameraParameters set for this camera and
+ * pushes the initial dimension configuration to the driver. Runs once at
+ * HAL init, before any setParameters() call.
+ * NOTE(review): |ret| is a bool but repeatedly receives driver status
+ * codes and is compared against NO_ERROR/MM_CAMERA_OK -- this works only
+ * because both success codes are 0; confirm before refactoring. */
+void QCameraHardwareInterface::initDefaultParameters()
+{
+ bool ret;
+ char prop[PROPERTY_VALUE_MAX];
+ mm_camera_dimension_t maxDim;
+ int rc = MM_CAMERA_OK;
+ LOGI("%s: E", __func__);
+
+ memset(&maxDim, 0, sizeof(mm_camera_dimension_t));
+ ret = getMaxPictureDimension(&maxDim);
+
+ if (ret != NO_ERROR) {
+ LOGE("%s: Cannot get Max picture size supported", __func__);
+ return;
+ }
+ // Fall back to liveshot defaults when the driver reported no max size.
+ if (!maxDim.width || !maxDim.height) {
+ maxDim.width = DEFAULT_LIVESHOT_WIDTH;
+ maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+ }
+
+ // Snapshot format override via persist property: 0 = NV21, 1 = NV61.
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.snap.format", prop, "0");
+ mSnapshotFormat = atoi(prop);
+ LOGV("%s: prop =(%s), snap_format=%d", __func__, prop, mSnapshotFormat);
+
+ //cam_ctrl_dimension_t dim;
+ mHFRLevel = 0;
+ memset(&mDimension, 0, sizeof(cam_ctrl_dimension_t));
+ memset(&mPreviewFormatInfo, 0, sizeof(preview_format_info_t));
+ mDimension.video_width = DEFAULT_VIDEO_WIDTH;
+ mDimension.video_height = DEFAULT_VIDEO_HEIGHT;
+ // mzhu mDimension.picture_width = DEFAULT_STREAM_WIDTH;
+ // mzhu mDimension.picture_height = DEFAULT_STREAM_HEIGHT;
+ mDimension.picture_width = maxDim.width;
+ mDimension.picture_height = maxDim.height;
+ mDimension.display_width = DEFAULT_STREAM_WIDTH;
+ mDimension.display_height = DEFAULT_STREAM_HEIGHT;
+ mDimension.orig_picture_dx = mDimension.picture_width;
+ mDimension.orig_picture_dy = mDimension.picture_height;
+ mDimension.ui_thumbnail_width = DEFAULT_STREAM_WIDTH;
+ mDimension.ui_thumbnail_height = DEFAULT_STREAM_HEIGHT;
+ mDimension.orig_video_width = DEFAULT_STREAM_WIDTH;
+ mDimension.orig_video_height = DEFAULT_STREAM_HEIGHT;
+
+ mDimension.prev_format = CAMERA_YUV_420_NV21;
+ mDimension.enc_format = CAMERA_YUV_420_NV12;
+ if (mSnapshotFormat == 1) {
+ mDimension.main_img_format = CAMERA_YUV_422_NV61;
+ } else {
+ mDimension.main_img_format = CAMERA_YUV_420_NV21;
+ }
+ mDimension.thumb_format = CAMERA_YUV_420_NV21;
+ LOGV("%s: main_img_format =%d, thumb_format=%d", __func__,
+ mDimension.main_img_format, mDimension.thumb_format);
+ mDimension.prev_padding_format = CAMERA_PAD_TO_WORD;
+
+ // Push the assembled dimension block to the driver; nothing else is
+ // initialized if this fails.
+ ret = native_set_parms(MM_CAMERA_PARM_DIMENSION,
+ sizeof(cam_ctrl_dimension_t), (void *) &mDimension);
+ if(!ret) {
+ LOGE("MM_CAMERA_PARM_DIMENSION Failed.");
+ return;
+ }
+
+ hasAutoFocusSupport();
+
+ // Initialize constant parameter strings. This will happen only once in the
+ // lifetime of the mediaserver process.
+ // NOTE(review): the guard is hard-coded to true, so these strings are in
+ // fact rebuilt on every call despite the comment above.
+ if (true/*!mParamStringInitialized*/) {
+ //filter picture sizes
+ filterPictureSizes();
+ mPictureSizeValues = create_sizes_str(
+ mPictureSizesPtr, mSupportedPictureSizesCount);
+ mPreviewSizeValues = create_sizes_str(
+ mPreviewSizes, mPreviewSizeCount);
+ mVideoSizeValues = create_sizes_str(
+ mVideoSizes, mVideoSizeCount);
+
+ //Query for max HFR value
+ camera_hfr_mode_t maxHFR;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_MAX_HFR_MODE, (void *)&maxHFR);
+ //Filter HFR values and build parameter string
+ String8 str;
+ for(int i=0; i<HFR_VALUES_COUNT; i++){
+ if(hfr[i].val <= maxHFR){
+ if(i>0) str.append(",");
+ str.append(hfr[i].desc);
+ }
+ }
+ mHfrValues = str;
+ mHfrSizeValues = create_sizes_str(
+ default_hfr_sizes, hfr_sizes_count);
+ mFpsRangesSupportedValues = create_fps_str(
+ FpsRangesSupported,FPS_RANGES_SUPPORTED_COUNT );
+ mParameters.set(
+ CameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ mFpsRangesSupportedValues);
+ mParameters.setPreviewFpsRange(MINIMUM_FPS*1000,MAXIMUM_FPS*1000);
+ mFlashValues = create_values_str(
+ flash, sizeof(flash) / sizeof(str_map));
+ mLensShadeValues = create_values_str(
+ lensshade,sizeof(lensshade)/sizeof(str_map));
+ mMceValues = create_values_str(
+ mce,sizeof(mce)/sizeof(str_map));
+ mEffectValues = create_values_str(effects, sizeof(effects) / sizeof(str_map));
+ mAntibandingValues = create_values_str(
+ antibanding, sizeof(antibanding) / sizeof(str_map));
+ mIsoValues = create_values_str(iso,sizeof(iso)/sizeof(str_map));
+ mAutoExposureValues = create_values_str(
+ autoexposure, sizeof(autoexposure) / sizeof(str_map));
+ mWhitebalanceValues = create_values_str(
+ whitebalance, sizeof(whitebalance) / sizeof(str_map));
+
+ if(mHasAutoFocusSupport){
+ mFocusModeValues = create_values_str(
+ focus_modes, sizeof(focus_modes) / sizeof(str_map));
+ }
+
+ mSceneModeValues = create_values_str(scenemode, sizeof(scenemode) / sizeof(str_map));
+
+ if(mHasAutoFocusSupport){
+ mTouchAfAecValues = create_values_str(
+ touchafaec,sizeof(touchafaec)/sizeof(str_map));
+ }
+ //Currently Enabling Histogram for 8x60
+ mHistogramValues = create_values_str(
+ histogram,sizeof(histogram)/sizeof(str_map));
+
+ mSkinToneEnhancementValues = create_values_str(
+ skinToneEnhancement,sizeof(skinToneEnhancement)/sizeof(str_map));
+
+ mPictureFormatValues = create_values_str(
+ picture_formats, sizeof(picture_formats)/sizeof(str_map));
+
+ mZoomSupported=false;
+ mMaxZoom=0;
+ mm_camera_zoom_tbl_t zmt;
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_MAXZOOM, &mMaxZoom)){
+ LOGE("%s:Failed to get max zoom",__func__);
+ }else{
+
+ LOGE("Max Zoom:%d",mMaxZoom);
+ /* Kernel driver limits the max amount of data that can be retreived through a control
+ command to 260 bytes hence we conservatively limit to 110 zoom ratios */
+ if(mMaxZoom>MAX_ZOOM_RATIOS) {
+ LOGE("%s:max zoom is larger than sizeof zoomRatios table",__func__);
+ mMaxZoom=MAX_ZOOM_RATIOS-1;
+ }
+ zmt.size=mMaxZoom;
+ zmt.zoom_ratio_tbl=&zoomRatios[0];
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_ZOOM_RATIO, &zmt)){
+ LOGE("%s:Failed to get max zoom ratios",__func__);
+ }else{
+ mZoomSupported=true;
+ mZoomRatioValues = create_str(zoomRatios, mMaxZoom);
+ }
+ }
+
+ LOGE("Zoom supported:%d",mZoomSupported);
+
+ denoise_value = create_values_str(
+ denoise, sizeof(denoise) / sizeof(str_map));
+
+ if(supportsFaceDetection()) {
+ mFaceDetectionValues = create_values_str(
+ facedetection, sizeof(facedetection) / sizeof(str_map));
+ }
+
+ if(mHasAutoFocusSupport){
+ mSelectableZoneAfValues = create_values_str(
+ selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map));
+ }
+
+ mSceneDetectValues = create_values_str(scenedetect, sizeof(scenedetect) / sizeof(str_map));
+
+ mRedeyeReductionValues = create_values_str(
+ redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map));
+
+ mZslValues = create_values_str(
+ zsl_modes,sizeof(zsl_modes)/sizeof(str_map));
+
+ mParamStringInitialized = true;
+ }
+
+ //set supported video sizes
+ mParameters.set(CameraParameters::KEY_SUPPORTED_VIDEO_SIZES, mVideoSizeValues.string());
+
+ //set default video size to first one in supported table
+ String8 vSize = create_sizes_str(&mVideoSizes[0], 1);
+ mParameters.set(CameraParameters::KEY_VIDEO_SIZE, vSize.string());
+
+ //Set Preview size
+ int default_preview_width, default_preview_height;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH,
+ &default_preview_width);
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT,
+ &default_preview_height);
+ mParameters.setPreviewSize(default_preview_width, default_preview_height);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
+ mPreviewSizeValues.string());
+ mDimension.display_width = default_preview_width;
+ mDimension.display_height = default_preview_height;
+
+ //Set Preview Frame Rate
+ if(mFps >= MINIMUM_FPS && mFps <= MAXIMUM_FPS) {
+ mPreviewFrameRateValues = create_values_range_str(
+ MINIMUM_FPS, mFps);
+ }else{
+ mPreviewFrameRateValues = create_values_range_str(
+ MINIMUM_FPS, MAXIMUM_FPS);
+ }
+
+
+ if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS)) {
+ mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ mPreviewFrameRateValues.string());
+ } else {
+ mParameters.set(
+ CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ DEFAULT_FIXED_FPS);
+ }
+
+ //Set Preview Frame Rate Modes
+ mParameters.setPreviewFrameRateMode("frame-rate-auto");
+ mFrameRateModeValues = create_values_str(
+ frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map));
+ if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS_MODE)){
+ mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES,
+ mFrameRateModeValues.string());
+ }
+
+ //Set Preview Format
+ //mParameters.setPreviewFormat("yuv420sp"); // informative
+ mParameters.setPreviewFormat(CameraParameters::PIXEL_FORMAT_YUV420SP);
+
+ mPreviewFormatValues = create_values_str(
+ preview_formats, sizeof(preview_formats) / sizeof(str_map));
+ mParameters.set(CameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+ mPreviewFormatValues.string());
+
+ //Set Overlay Format
+ mParameters.set("overlay-format", HAL_PIXEL_FORMAT_YCbCr_420_SP);
+ mParameters.set("max-num-detected-faces-hw", "2");
+
+ //Set Picture Size
+ mParameters.setPictureSize(DEFAULT_PICTURE_WIDTH, DEFAULT_PICTURE_HEIGHT);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ mPictureSizeValues.string());
+
+ //Set Preview Frame Rate
+ if(mFps >= MINIMUM_FPS && mFps <= MAXIMUM_FPS) {
+ mParameters.setPreviewFrameRate(mFps);
+ }else{
+ mParameters.setPreviewFrameRate(DEFAULT_FPS);
+ }
+
+ //Set Picture Format
+ mParameters.setPictureFormat("jpeg"); // informative
+ mParameters.set(CameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
+ mPictureFormatValues);
+
+ mParameters.set(CameraParameters::KEY_JPEG_QUALITY, "85"); // max quality
+ mJpegQuality = 85;
+ //Set Video Format
+ mParameters.set(CameraParameters::KEY_VIDEO_FRAME_FORMAT, "yuv420sp");
+
+ //Set Thumbnail parameters
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
+ THUMBNAIL_WIDTH_STR); // informative
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,
+ THUMBNAIL_HEIGHT_STR); // informative
+ mDimension.ui_thumbnail_width =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+ mDimension.ui_thumbnail_height =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "90");
+ String8 valuesStr = create_sizes_str(default_thumbnail_sizes, thumbnail_sizes_count);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES,
+ valuesStr.string());
+ // Define CAMERA_SMOOTH_ZOOM in Android.mk file , to enable smoothzoom
+#ifdef CAMERA_SMOOTH_ZOOM
+ mParameters.set(CameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true");
+#endif
+ if(mZoomSupported){
+ mParameters.set(CameraParameters::KEY_ZOOM_SUPPORTED, "true");
+ LOGE("max zoom is %d", mMaxZoom-1);
+ // NOTE(review): cleaned up a merge-artifact comment here; the stray
+ // duplicated lines were commented-out text only.
+ /* mMaxZoom value that the query interface returns is the size
+ * of zoom table. So the actual max zoom value will be one
+ * less than that value. */
+
+ mParameters.set("max-zoom",mMaxZoom-1);
+ mParameters.set(CameraParameters::KEY_ZOOM_RATIOS,
+ mZoomRatioValues);
+ } else
+ {
+ mParameters.set(CameraParameters::KEY_ZOOM_SUPPORTED, "false");
+ }
+
+ /* Enable zoom support for video application if VPE enabled */
+ if(mZoomSupported) {
+ mParameters.set("video-zoom-support", "true");
+ } else {
+ mParameters.set("video-zoom-support", "false");
+ }
+
+ //8960 supports Power modes : Low power, Normal Power.
+ mParameters.set("power-mode-supported", "true");
+ //Set Live shot support
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LIVESHOT_MAIN);
+ if(!rc) {
+ LOGE("%s:LIVESHOT is not supported", __func__);
+ mParameters.set("video-snapshot-supported", "false");
+ } else {
+ mParameters.set("video-snapshot-supported", "true");
+ }
+
+ //Set default power mode
+ mParameters.set(CameraParameters::KEY_POWER_MODE,"Normal_Power");
+
+ //Set Camera Mode
+ mParameters.set(CameraParameters::KEY_CAMERA_MODE,0);
+ mParameters.set(CameraParameters::KEY_AE_BRACKET_HDR,"Off");
+
+ //Set Antibanding
+ mParameters.set(CameraParameters::KEY_ANTIBANDING,
+ CameraParameters::ANTIBANDING_OFF);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_ANTIBANDING,
+ mAntibandingValues);
+
+ //Set Effect
+ mParameters.set(CameraParameters::KEY_EFFECT,
+ CameraParameters::EFFECT_NONE);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_EFFECTS, mEffectValues);
+
+ //Set Auto Exposure
+ mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE,
+ CameraParameters::AUTO_EXPOSURE_FRAME_AVG);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_AUTO_EXPOSURE, mAutoExposureValues);
+
+ //Set WhiteBalance
+ mParameters.set(CameraParameters::KEY_WHITE_BALANCE,
+ CameraParameters::WHITE_BALANCE_AUTO);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_WHITE_BALANCE,mWhitebalanceValues);
+
+ //Set AEC_LOCK
+ mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, "false");
+ if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_AEC_LOCK)){
+ mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true");
+ } else {
+ mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "false");
+ }
+ //Set AWB_LOCK
+ mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, "false");
+ if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_AWB_LOCK))
+ mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true");
+ else
+ mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "false");
+
+ //Set Focus Mode
+ mParameters.set(CameraParameters::KEY_FOCUS_MODE,
+ CameraParameters::FOCUS_MODE_AUTO);
+ mFocusMode = AF_MODE_AUTO;
+ mParameters.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+ mFocusModeValues);
+ mParameters.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "1");
+ mParameters.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, "1");
+ } else {
+ mParameters.set(CameraParameters::KEY_FOCUS_MODE,
+ CameraParameters::FOCUS_MODE_INFINITY);
+ mFocusMode = DONT_CARE;
+ mParameters.set(CameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+ CameraParameters::FOCUS_MODE_INFINITY);
+ mParameters.set(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "0");
+ mParameters.set(CameraParameters::KEY_MAX_NUM_METERING_AREAS, "0");
+ }
+
+ mParameters.set(CameraParameters::KEY_FOCUS_AREAS, DEFAULT_CAMERA_AREA);
+ mParameters.set(CameraParameters::KEY_METERING_AREAS, DEFAULT_CAMERA_AREA);
+
+ //Set Flash
+ if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LED_MODE)) {
+ mParameters.set(CameraParameters::KEY_FLASH_MODE,
+ CameraParameters::FLASH_MODE_OFF);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_FLASH_MODES,
+ mFlashValues);
+ }
+
+ //Set Sharpness
+ mParameters.set(CameraParameters::KEY_MAX_SHARPNESS,
+ CAMERA_MAX_SHARPNESS);
+ mParameters.set(CameraParameters::KEY_SHARPNESS,
+ CAMERA_DEF_SHARPNESS);
+
+ //Set Contrast
+ mParameters.set(CameraParameters::KEY_MAX_CONTRAST,
+ CAMERA_MAX_CONTRAST);
+ mParameters.set(CameraParameters::KEY_CONTRAST,
+ CAMERA_DEF_CONTRAST);
+
+ //Set Saturation
+ mParameters.set(CameraParameters::KEY_MAX_SATURATION,
+ CAMERA_MAX_SATURATION);
+ mParameters.set(CameraParameters::KEY_SATURATION,
+ CAMERA_DEF_SATURATION);
+
+ //Set Brightness/luma-adaptaion
+ mParameters.set("luma-adaptation", "3");
+
+ mParameters.set(CameraParameters::KEY_PICTURE_FORMAT,
+ CameraParameters::PIXEL_FORMAT_JPEG);
+
+ //Set Lensshading
+ mParameters.set(CameraParameters::KEY_LENSSHADE,
+ CameraParameters::LENSSHADE_ENABLE);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_LENSSHADE_MODES,
+ mLensShadeValues);
+
+ //Set ISO Mode
+ mParameters.set(CameraParameters::KEY_ISO_MODE,
+ CameraParameters::ISO_AUTO);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_ISO_MODES,
+ mIsoValues);
+
+ //Set MCE
+ mParameters.set(CameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT,
+ CameraParameters::MCE_ENABLE);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES,
+ mMceValues);
+ //Set HFR
+ if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_HFR)) {
+ mParameters.set(CameraParameters::KEY_VIDEO_HIGH_FRAME_RATE,
+ CameraParameters::VIDEO_HFR_OFF);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_HFR_SIZES,
+ mHfrSizeValues.string());
+ mParameters.set(CameraParameters::KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES,
+ mHfrValues);
+ } else{
+ mParameters.set(CameraParameters::KEY_SUPPORTED_HFR_SIZES,"");
+ }
+
+ //Set Histogram
+ mParameters.set(CameraParameters::KEY_HISTOGRAM,
+ CameraParameters::HISTOGRAM_DISABLE);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_HISTOGRAM_MODES,
+ mHistogramValues);
+
+ //Set SkinTone Enhancement
+ mParameters.set(CameraParameters::KEY_SKIN_TONE_ENHANCEMENT,
+ CameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE);
+ mParameters.set("skinToneEnhancement", "0");
+ mParameters.set(CameraParameters::KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES,
+ mSkinToneEnhancementValues);
+
+ //Set Scene Mode
+ mParameters.set(CameraParameters::KEY_SCENE_MODE,
+ CameraParameters::SCENE_MODE_AUTO);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_SCENE_MODES,
+ mSceneModeValues);
+
+ //Set Streaming Textures
+ mParameters.set("strtextures", "OFF");
+
+ //Set Denoise
+ mParameters.set(CameraParameters::KEY_DENOISE,
+ CameraParameters::DENOISE_OFF);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_DENOISE,
+ denoise_value);
+ //Set Touch AF/AEC
+ mParameters.set(CameraParameters::KEY_TOUCH_AF_AEC,
+ CameraParameters::TOUCH_AF_AEC_OFF);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_TOUCH_AF_AEC,
+ mTouchAfAecValues);
+ mParameters.set("touchAfAec-dx","100");
+ mParameters.set("touchAfAec-dy","100");
+
+ //Set Scene Detection
+ mParameters.set(CameraParameters::KEY_SCENE_DETECT,
+ CameraParameters::SCENE_DETECT_OFF);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_SCENE_DETECT,
+ mSceneDetectValues);
+
+ //Set Selectable Zone AF
+ mParameters.set(CameraParameters::KEY_SELECTABLE_ZONE_AF,
+ CameraParameters::SELECTABLE_ZONE_AF_AUTO);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_SELECTABLE_ZONE_AF,
+ mSelectableZoneAfValues);
+
+ //Set Face Detection
+ mParameters.set(CameraParameters::KEY_FACE_DETECTION,
+ CameraParameters::FACE_DETECTION_OFF);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_FACE_DETECTION,
+ mFaceDetectionValues);
+
+ //Set Red Eye Reduction
+ mParameters.set(CameraParameters::KEY_REDEYE_REDUCTION,
+ CameraParameters::REDEYE_REDUCTION_DISABLE);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_REDEYE_REDUCTION,
+ mRedeyeReductionValues);
+
+ //Set ZSL
+ mParameters.set(CameraParameters::KEY_ZSL,
+ CameraParameters::ZSL_OFF);
+ mParameters.set(CameraParameters::KEY_SUPPORTED_ZSL_MODES,
+ mZslValues);
+
+ //Set Focal length, horizontal and vertical view angles
+ float focalLength = 0.0f;
+ float horizontalViewAngle = 0.0f;
+ float verticalViewAngle = 0.0f;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_FOCAL_LENGTH,
+ (void *)&focalLength);
+ mParameters.setFloat(CameraParameters::KEY_FOCAL_LENGTH,
+ focalLength);
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+ (void *)&horizontalViewAngle);
+ mParameters.setFloat(CameraParameters::KEY_HORIZONTAL_VIEW_ANGLE,
+ horizontalViewAngle);
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE,
+ (void *)&verticalViewAngle);
+ mParameters.setFloat(CameraParameters::KEY_VERTICAL_VIEW_ANGLE,
+ verticalViewAngle);
+
+ //Set Exposure Compensation
+ mParameters.set(
+ CameraParameters::KEY_MAX_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR);
+ mParameters.set(
+ CameraParameters::KEY_MIN_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR);
+ mParameters.set(
+ CameraParameters::KEY_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR);
+ mParameters.setFloat(
+ CameraParameters::KEY_EXPOSURE_COMPENSATION_STEP,
+ EXPOSURE_COMPENSATION_STEP);
+
+ mParameters.set("num-snaps-per-shutter", 1);
+
+ mParameters.set("capture-burst-captures-values", getZSLQueueDepth());
+ mParameters.set("capture-burst-interval-supported", "true");
+ mParameters.set("capture-burst-interval-max", BURST_INTREVAL_MAX); /*skip frames*/
+ mParameters.set("capture-burst-interval-min", BURST_INTREVAL_MIN); /*skip frames*/
+ mParameters.set("capture-burst-interval", BURST_INTREVAL_DEFAULT); /*skip frames*/
+ mParameters.set("capture-burst-retroactive", 0);
+ mParameters.set("capture-burst-retroactive-max", getZSLQueueDepth());
+ mParameters.set("capture-burst-exposures", "");
+ mParameters.set("capture-burst-exposures-values",
+ "-12,-11,-10,-9,-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10,11,12");
+ // Build the camera-mode-values list: currently every mode index in
+ // [0, HAL_CAM_MODE_MAX) is included (the exclusion branch is disabled).
+ {
+ String8 CamModeStr;
+ char buffer[32];
+ int flag = 0;
+
+ for (int i = 0; i < HAL_CAM_MODE_MAX; i++) {
+ if ( 0 ) { /*exclude some conflicting case*/
+ } else {
+ if (flag == 0) { /*first item*/
+ snprintf(buffer, sizeof(buffer), "%d", i);
+ } else {
+ snprintf(buffer, sizeof(buffer), ",%d", i);
+ }
+ flag = 1;
+ CamModeStr.append(buffer);
+ }
+ }
+ mParameters.set("camera-mode-values", CamModeStr);
+ }
+
+ mParameters.set("ae-bracket-hdr-values",
+ create_values_str(hdr_bracket, sizeof(hdr_bracket)/sizeof(str_map) ));
+
+// if(mIs3DModeOn)
+// mParameters.set("3d-frame-format", "left-right");
+ mParameters.set("no-display-mode", 0);
+ //mUseOverlay = useOverlay();
+ mParameters.set("zoom", 0);
+
+ // NOTE(review): "mNuberOfVFEOutputs" (sic - typo kept for consistency
+ // with any other references) is read below even when the query fails,
+ // i.e. potentially uninitialized -- confirm the driver always writes it.
+ int mNuberOfVFEOutputs;
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, &mNuberOfVFEOutputs);
+ // NOTE(review): |ret| is bool; comparing it to MM_CAMERA_OK relies on
+ // MM_CAMERA_OK == 0 (so success -> false -> "== MM_CAMERA_OK").
+ if(ret != MM_CAMERA_OK) {
+ LOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE failed");
+ ret = BAD_VALUE;
+ }
+ if(mNuberOfVFEOutputs == 1)
+ {
+ mParameters.set(CameraParameters::KEY_SINGLE_ISP_OUTPUT_ENABLED, "true");
+ } else {
+ mParameters.set(CameraParameters::KEY_SINGLE_ISP_OUTPUT_ENABLED, "false");
+ }
+
+ // Push the assembled defaults through the normal setParameters() path.
+ if (setParameters(mParameters) != NO_ERROR) {
+ LOGE("Failed to set default parameters?!");
+ }
+ mInitialized = true;
+ strTexturesOn = false;
+
+ LOGI("%s: X", __func__);
+ return;
+}
+
+/**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported.
+ */
+
+int QCameraHardwareInterface::setParameters(const char *parms)
+{
+    // Flattened-string entry point: unflatten into a CameraParameters
+    // object and delegate to the typed overload.
+    // Returns 0 on NO_ERROR, -1 on any failure.
+    CameraParameters param;
+    param.unflatten(String8(parms));
+    return (setParameters(param) == NO_ERROR) ? 0 : -1;
+}
+
+/**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported.
+ * Every individual setter is attempted even after a failure; the last
+ * failing status is what gets returned. The relative order of the
+ * setters below is deliberate -- see the inline comments. */
+status_t QCameraHardwareInterface::setParameters(const CameraParameters& params)
+{
+ // NOTE(review): |ret| is never used after this initialization;
+ // |final_rc| below carries the actual result.
+ status_t ret = NO_ERROR;
+
+ LOGI("%s: E", __func__);
+// Mutex::Autolock l(&mLock);
+ status_t rc, final_rc = NO_ERROR;
+
+ if ((rc = setCameraMode(params))) final_rc = rc;
+ if ((rc = setPowerMode(params))) final_rc = rc;
+ if ((rc = setPreviewSize(params))) final_rc = rc;
+ if ((rc = setVideoSize(params))) final_rc = rc;
+ if ((rc = setPictureSize(params))) final_rc = rc;
+ if ((rc = setJpegThumbnailSize(params))) final_rc = rc;
+ if ((rc = setJpegQuality(params))) final_rc = rc;
+ if ((rc = setEffect(params))) final_rc = rc;
+ if ((rc = setGpsLocation(params))) final_rc = rc;
+ if ((rc = setRotation(params))) final_rc = rc;
+ if ((rc = setZoom(params))) final_rc = rc;
+ if ((rc = setOrientation(params))) final_rc = rc;
+ if ((rc = setLensshadeValue(params))) final_rc = rc;
+ if ((rc = setMCEValue(params))) final_rc = rc;
+ if ((rc = setPictureFormat(params))) final_rc = rc;
+ if ((rc = setSharpness(params))) final_rc = rc;
+ if ((rc = setSaturation(params))) final_rc = rc;
+ if ((rc = setSceneMode(params))) final_rc = rc;
+ if ((rc = setContrast(params))) final_rc = rc;
+ if ((rc = setFaceDetect(params))) final_rc = rc;
+ if ((rc = setStrTextures(params))) final_rc = rc;
+ if ((rc = setPreviewFormat(params))) final_rc = rc;
+ if ((rc = setSkinToneEnhancement(params))) final_rc = rc;
+ if ((rc = setWaveletDenoise(params))) final_rc = rc;
+ if ((rc = setAntibanding(params))) final_rc = rc;
+ // if ((rc = setOverlayFormats(params))) final_rc = rc;
+ if ((rc = setRedeyeReduction(params))) final_rc = rc;
+ if ((rc = setCaptureBurstExp())) final_rc = rc;
+
+ // Burst exposures: take the app-supplied value when present, otherwise
+ // fall back to the persist property (if set).
+ const char *str_val = params.get("capture-burst-exposures");
+ if ( str_val == NULL || strlen(str_val)==0 ) {
+ char burst_exp[PROPERTY_VALUE_MAX];
+ memset(burst_exp, 0, sizeof(burst_exp));
+ property_get("persist.capture.burst.exposures", burst_exp, "");
+ if ( strlen(burst_exp)>0 ) {
+ mParameters.set("capture-burst-exposures", burst_exp);
+ }
+ } else {
+ mParameters.set("capture-burst-exposures", str_val);
+ }
+ mParameters.set("num-snaps-per-shutter", params.get("num-snaps-per-shutter"));
+
+ if ((rc = setAEBracket(params))) final_rc = rc;
+ // if ((rc = setDenoise(params))) final_rc = rc;
+ if ((rc = setPreviewFpsRange(params))) final_rc = rc;
+ if((rc = setRecordingHint(params))) final_rc = rc;
+ if ((rc = setNumOfSnapshot(params))) final_rc = rc;
+ if ((rc = setAecAwbLock(params))) final_rc = rc;
+
+ // 3A-related setters only apply when no bestshot/scene mode overrides
+ // them (i.e. scene mode is off).
+ const char *str = params.get(CameraParameters::KEY_SCENE_MODE);
+ int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+
+ if((value != NOT_FOUND) && (value == CAMERA_BESTSHOT_OFF )) {
+ //if ((rc = setPreviewFrameRateMode(params))) final_rc = rc;
+ if ((rc = setPreviewFrameRate(params))) final_rc = rc;
+ if ((rc = setAutoExposure(params))) final_rc = rc;
+ if ((rc = setExposureCompensation(params))) final_rc = rc;
+ if ((rc = setWhiteBalance(params))) final_rc = rc;
+ if ((rc = setFlash(params))) final_rc = rc;
+ if ((rc = setFocusMode(params))) final_rc = rc;
+ if ((rc = setBrightness(params))) final_rc = rc;
+ if ((rc = setISOValue(params))) final_rc = rc;
+ if ((rc = setFocusAreas(params))) final_rc = rc;
+ if ((rc = setMeteringAreas(params))) final_rc = rc;
+ }
+ //selectableZoneAF needs to be invoked after continuous AF
+ if ((rc = setSelectableZoneAf(params))) final_rc = rc;
+ // setHighFrameRate needs to be done at end, as there can
+ // be a preview restart, and need to use the updated parameters
+ if ((rc = setHighFrameRate(params))) final_rc = rc;
+ if ((rc = setZSLBurstLookBack(params))) final_rc = rc;
+ if ((rc = setZSLBurstInterval(params))) final_rc = rc;
+ if ((rc = setNoDisplayMode(params))) final_rc = rc;
+
+ //Update Exiftag values.
+ setExifTags();
+
+ LOGI("%s: X", __func__);
+ return final_rc;
+}
+
+/** Retrieve the camera parameters. The buffer returned by the camera HAL
+ must be returned back to it with put_parameters, if put_parameters
+ is not NULL.
+ */
+// Flattens the current mParameters into a freshly malloc'd, NUL-terminated
+// C string and stores it in *parms. Always returns 0.
+// NOTE(review): if malloc fails, *parms is left untouched while 0 is still
+// returned -- a caller that does not pre-initialize *parms may read garbage.
+int QCameraHardwareInterface::getParameters(char **parms)
+{
+ char* rc = NULL;
+ String8 str;
+ CameraParameters param = getParameters();
+ //param.dump();
+ str = param.flatten( );
+ rc = (char *)malloc(sizeof(char)*(str.length()+1));
+ if(rc != NULL){
+ memset(rc, 0, sizeof(char)*(str.length()+1));
+ strncpy(rc, str.string(), str.length());
+ rc[str.length()] = 0; // explicit NUL terminator (strncpy copied exactly length() bytes)
+ *parms = rc;
+ }
+ return 0;
+}
+
+/** The camera HAL uses its own memory to pass us the parameters when we
+ call get_parameters. Use this function to return the memory back to
+ the camera HAL, if put_parameters is not NULL. If put_parameters
+ is NULL, then you have to use free() to release the memory.
+*/
+// Releases a buffer previously handed out by getParameters(char **parms).
+void QCameraHardwareInterface::putParameters(char *rc)
+{
+ free(rc);
+ rc = NULL; // only clears the local copy; the caller's pointer is unchanged
+}
+
+// Returns a reference to the live parameter set, first refreshing the
+// focus-distances key from the cached mFocusDistance string under mLock.
+CameraParameters& QCameraHardwareInterface::getParameters()
+{
+ Mutex::Autolock lock(mLock);
+ mParameters.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistance.string());
+ return mParameters;
+}
+
+// Pushes the current KEY_FACE_DETECTION setting down to the driver
+// (MM_CAMERA_PARM_FD) after re-applying the preview dimensions.
+// Returns NO_ERROR/UNKNOWN_ERROR from the driver call, or BAD_VALUE when the
+// parameter string is missing.
+status_t QCameraHardwareInterface::runFaceDetection()
+{
+ bool ret = true;
+
+ const char *str = mParameters.get(CameraParameters::KEY_FACE_DETECTION);
+ if (str != NULL) {
+ int value = attr_lookup(facedetection,
+ sizeof(facedetection) / sizeof(str_map), str);
+ // NOTE(review): value is forwarded to the driver even when attr_lookup
+ // returns NOT_FOUND -- confirm the lower layer tolerates that.
+#if 0
+ mMetaDataWaitLock.lock();
+ if (value == true) {
+ if(mMetaDataHeap != NULL)
+ mMetaDataHeap.clear();
+
+ mMetaDataHeap =
+ new AshmemPool((sizeof(int)*(MAX_ROI*4+1)),
+ 1,
+ (sizeof(int)*(MAX_ROI*4+1)),
+ "metadata");
+ if (!mMetaDataHeap->initialized()) {
+ LOGE("Meta Data Heap allocation failed ");
+ mMetaDataHeap.clear();
+ LOGE("runFaceDetection X: error initializing mMetaDataHeap");
+ mMetaDataWaitLock.unlock();
+ return UNKNOWN_ERROR;
+ }
+ mSendMetaData = true;
+ } else {
+ if(mMetaDataHeap != NULL)
+ mMetaDataHeap.clear();
+ }
+ mMetaDataWaitLock.unlock();
+#endif
+ cam_ctrl_dimension_t dim;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+ preview_parm_config (&dim, mParameters);
+ LOGE("%s: why set_dimension everytime?", __func__);
+ // NOTE(review): the result of the DIMENSION call is immediately
+ // overwritten by the FD call, so a dimension failure is silently ignored.
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+ ret = native_set_parms(MM_CAMERA_PARM_FD, sizeof(int8_t), (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ LOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Applies KEY_SHARPNESS from |params| to the driver.
+// Returns NO_ERROR when the parameter is unsupported by this sensor or was
+// applied successfully; UNKNOWN_ERROR when the value is out of range or the
+// driver rejects it.
+// Fix vs. original: the "not supported" log said "CONTRAST" (copy-paste from
+// setContrast); it now names the correct parameter.
+status_t QCameraHardwareInterface::setSharpness(const CameraParameters& params)
+{
+ bool ret = false;
+ int rc = MM_CAMERA_OK;
+ LOGE("%s",__func__);
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SHARPNESS);
+ if(!rc) {
+ LOGE("%s:SHARPNESS not supported", __func__);
+ return NO_ERROR;
+ }
+ int sharpness = params.getInt(CameraParameters::KEY_SHARPNESS);
+ // Range-check before touching the driver or the cached parameters.
+ if((sharpness < CAMERA_MIN_SHARPNESS
+ || sharpness > CAMERA_MAX_SHARPNESS))
+ return UNKNOWN_ERROR;
+
+ LOGV("setting sharpness %d", sharpness);
+ mParameters.set(CameraParameters::KEY_SHARPNESS, sharpness);
+ ret = native_set_parms(MM_CAMERA_PARM_SHARPNESS, sizeof(sharpness),
+ (void *)&sharpness);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+// Applies KEY_SATURATION from |params| to the driver.
+// Returns NO_ERROR when unsupported or applied, UNKNOWN_ERROR when the value
+// is out of range or the driver call fails.
+status_t QCameraHardwareInterface::setSaturation(const CameraParameters& params)
+{
+ bool ret = false;
+ int rc = MM_CAMERA_OK;
+ LOGE("%s",__func__);
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SATURATION);
+ if(!rc) {
+ LOGE("%s:MM_CAMERA_PARM_SATURATION not supported", __func__);
+ return NO_ERROR;
+ }
+ int result;
+ int saturation = params.getInt(CameraParameters::KEY_SATURATION);
+
+ if((saturation < CAMERA_MIN_SATURATION)
+ || (saturation > CAMERA_MAX_SATURATION))
+ return UNKNOWN_ERROR;
+
+ LOGV("Setting saturation %d", saturation);
+ mParameters.set(CameraParameters::KEY_SATURATION, saturation);
+ // NOTE(review): this overload also reports a driver-side status via
+ // |result| -- presumably always written by native_set_parms; verify.
+ ret = native_set_parms(MM_CAMERA_PARM_SATURATION, sizeof(saturation),
+ (void *)&saturation, (int *)&result);
+ if(result != MM_CAMERA_OK)
+ LOGI("Saturation Value: %d is not set as the selected value is not supported", saturation);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+// Applies KEY_CONTRAST to the driver, but only while the scene ("bestshot")
+// mode is CAMERA_BESTSHOT_OFF; in any other scene mode the request is
+// ignored with NO_ERROR. May flag a preview restart when the driver reports
+// that bestshot reconfiguration is needed and the contrast actually changed.
+status_t QCameraHardwareInterface::setContrast(const CameraParameters& params)
+{
+ LOGE("%s E", __func__ );
+ int rc = MM_CAMERA_OK;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_CONTRAST);
+ if(!rc) {
+ LOGE("%s:CONTRAST not supported", __func__);
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_SCENE_MODE);
+ LOGE("Contrast : %s",str);
+ // NOTE(review): str may be NULL here; it is passed to attr_lookup and a
+ // "%s" log as-is -- confirm both tolerate NULL.
+ int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+ if(value == CAMERA_BESTSHOT_OFF) {
+ int contrast = params.getInt(CameraParameters::KEY_CONTRAST);
+ if((contrast < CAMERA_MIN_CONTRAST)
+ || (contrast > CAMERA_MAX_CONTRAST))
+ {
+ LOGE("Contrast Value not matching");
+ return UNKNOWN_ERROR;
+ }
+ LOGV("setting contrast %d", contrast);
+ mParameters.set(CameraParameters::KEY_CONTRAST, contrast);
+ LOGE("Calling Contrast set on Lower layer");
+ bool ret = native_set_parms(MM_CAMERA_PARM_CONTRAST, sizeof(contrast),
+ (void *)&contrast);
+ LOGE("Lower layer returned %d", ret);
+ int bestshot_reconfigure;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+ &bestshot_reconfigure);
+ if(bestshot_reconfigure) {
+ if (mContrast != contrast) {
+ mContrast = contrast;
+ // Restart preview so the new contrast takes effect in ZSL mode.
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+ mRestartPreview = 1;
+ pausePreviewForZSL();
+ }
+ }
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ } else {
+ LOGI(" Contrast value will not be set " \
+ "when the scenemode selected is %s", str);
+ return NO_ERROR;
+ }
+ return BAD_VALUE; // NOTE(review): unreachable -- both branches above return.
+}
+
+// Enables/disables automatic scene detection (ASD) in the driver based on
+// KEY_SCENE_DETECT. Returns NO_ERROR when unsupported or applied,
+// UNKNOWN_ERROR on driver failure, BAD_VALUE for a missing/unknown string.
+status_t QCameraHardwareInterface::setSceneDetect(const CameraParameters& params)
+{
+ LOGE("%s",__func__);
+ bool retParm;
+ int rc = MM_CAMERA_OK;
+
+ rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_ASD_ENABLE);
+ if(!rc) {
+ LOGE("%s:MM_CAMERA_PARM_ASD_ENABLE not supported", __func__);
+ return NO_ERROR;
+ }
+
+ const char *str = params.get(CameraParameters::KEY_SCENE_DETECT);
+ LOGE("Scene Detect string : %s",str);
+ if (str != NULL) {
+ int32_t value = attr_lookup(scenedetect, sizeof(scenedetect) / sizeof(str_map), str);
+ LOGE("Scenedetect Value : %d",value);
+ if (value != NOT_FOUND) {
+ mParameters.set(CameraParameters::KEY_SCENE_DETECT, str);
+
+ retParm = native_set_parms(MM_CAMERA_PARM_ASD_ENABLE, sizeof(value),
+ (void *)&value);
+
+ return retParm ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ return BAD_VALUE;
+}
+
+// Applies the "zoom" level from |params| to the driver, scaling the level by
+// a fixed step. Returns NO_ERROR when unsupported or applied, UNKNOWN_ERROR
+// on driver failure, BAD_VALUE when the level is outside [0, mMaxZoom-1].
+status_t QCameraHardwareInterface::setZoom(const CameraParameters& params)
+{
+ status_t rc = NO_ERROR;
+
+ LOGE("%s: E",__func__);
+
+
+ if( !( cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ZOOM))) {
+ LOGE("%s:MM_CAMERA_PARM_ZOOM not supported", __func__);
+ return NO_ERROR;
+ }
+ // No matter how many different zoom values the driver can provide, HAL
+ // provides applications the same number of zoom levels. The maximum driver
+ // zoom value depends on sensor output (VFE input) and preview size (VFE
+ // output) because VFE can only crop and cannot upscale. If the preview size
+ // is bigger, the maximum zoom ratio is smaller. However, we want the
+ // zoom ratio of each zoom level to always be the same whatever the preview
+ // size is. Ex: zoom level 1 is always 1.2x, zoom level 2 is 1.44x, etc. So,
+ // we need to have a fixed maximum zoom value and do read it from the
+ // driver.
+ static const int ZOOM_STEP = 1;
+ int32_t zoom_level = params.getInt("zoom");
+ if(zoom_level >= 0 && zoom_level <= mMaxZoom-1) {
+ mParameters.set("zoom", zoom_level);
+ int32_t zoom_value = ZOOM_STEP * zoom_level;
+ bool ret = native_set_parms(MM_CAMERA_PARM_ZOOM,
+ sizeof(zoom_value), (void *)&zoom_value);
+ if(ret) {
+ // Cache the applied level only when the driver accepted it.
+ mCurrentZoom=zoom_level;
+ }
+ rc = ret ? NO_ERROR : UNKNOWN_ERROR;
+ } else {
+ rc = BAD_VALUE;
+ }
+ LOGE("%s X",__func__);
+ return rc;
+
+}
+
+// Applies KEY_ISO_MODE from |params| to the driver. The CAMERA_ISO_DEBLUR
+// pseudo-mode additionally toggles hand-jitter reduction (HJR) on, and
+// leaving it toggles HJR back off; mHJR tracks that state across calls.
+// Returns NO_ERROR when unsupported or applied, BAD_VALUE otherwise.
+status_t QCameraHardwareInterface::setISOValue(const CameraParameters& params) {
+
+ status_t rc = NO_ERROR;
+ LOGE("%s",__func__);
+
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ISO);
+ if(!rc) {
+ LOGE("%s:MM_CAMERA_PARM_ISO not supported", __func__);
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_ISO_MODE);
+ LOGE("ISO string : %s",str);
+ int8_t temp_hjr;
+ if (str != NULL) {
+ int value = (camera_iso_mode_type)attr_lookup(
+ iso, sizeof(iso) / sizeof(str_map), str);
+ LOGE("ISO Value : %d",value);
+ if (value != NOT_FOUND) {
+ camera_iso_mode_type temp = (camera_iso_mode_type) value;
+ if (value == CAMERA_ISO_DEBLUR) {
+ // Entering deblur: enable hand-jitter reduction in the driver.
+ temp_hjr = true;
+ native_set_parms(MM_CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+ mHJR = value;
+ }
+ else {
+ // Leaving deblur: disable HJR if it was previously enabled.
+ if (mHJR == CAMERA_ISO_DEBLUR) {
+ temp_hjr = false;
+ native_set_parms(MM_CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+ mHJR = value;
+ }
+ }
+
+ mParameters.set(CameraParameters::KEY_ISO_MODE, str);
+ // NOTE(review): the driver result is ignored here; NO_ERROR is
+ // returned even if the ISO set fails.
+ native_set_parms(MM_CAMERA_PARM_ISO, sizeof(camera_iso_mode_type), (void *)&temp);
+ return NO_ERROR;
+ }
+ }
+ return BAD_VALUE;
+}
+
+// Reads the current focus distances from the driver and caches them in
+// mFocusDistance as a "near,optimal,far" string; all three are "Infinity"
+// when the focus mode is AF_MODE_INFINITY. Returns NO_ERROR on success,
+// BAD_VALUE if the driver query fails.
+status_t QCameraHardwareInterface::updateFocusDistances()
+{
+ LOGV("%s: IN", __FUNCTION__);
+ focus_distances_info_t focusDistances;
+ if(cam_config_get_parm(mCameraId, MM_CAMERA_PARM_FOCUS_DISTANCES,
+ &focusDistances) == MM_CAMERA_OK) {
+ String8 str;
+ char buffer[32] = {0};
+ //set all distances to infinity if focus mode is infinity
+ if(mFocusMode == AF_MODE_INFINITY) {
+ snprintf(buffer, sizeof(buffer), "Infinity,");
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), "Infinity,");
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), "Infinity");
+ str.append(buffer);
+ } else {
+ snprintf(buffer, sizeof(buffer), "%f", focusDistances.focus_distance[0]);
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[1]);
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[2]);
+ str.append(buffer);
+ }
+ LOGE("%s: setting KEY_FOCUS_DISTANCES as %s", __FUNCTION__, str.string());
+ mFocusDistance = str;
+ return NO_ERROR;
+ }
+ LOGE("%s: get CAMERA_PARM_FOCUS_DISTANCES failed!!!", __FUNCTION__);
+ return BAD_VALUE;
+}
+
+// Parse string like "(1, 2, 3, 4, ..., N)"
+// num is pointer to an allocated array of size N
+// Returns 0 on success, -1 on a NULL output array or malformed string.
+// NOTE(review): assumes str is non-NULL and non-empty (str[0] and
+// strlen(str)-1 are read unconditionally) -- callers must guarantee this.
+static int parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+ char *start, *end;
+ if(num == NULL) {
+ LOGE("Invalid output array (num == NULL)");
+ return -1;
+ }
+ //check if string starts and ends with parentheses
+ if(str[0] != '(' || str[strlen(str)-1] != ')') {
+ LOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)", str);
+ return -1;
+ }
+ start = (char*) str;
+ start++;
+ for(int i=0; i<N; i++) {
+ *(num+i) = (int) strtol(start, &end, 10);
+ // Every element except the last must be followed by the delimiter;
+ // the last element's terminator is not validated here.
+ if(*end != delim && i < N-1) {
+ LOGE("Cannot find delimeter '%c' in string \"%s\". end = %c", delim, str, *end);
+ return -1;
+ }
+ start = end+1;
+ }
+ return 0;
+}
+
+// parse string like "(1, 2, 3, 4, 5),(1, 2, 3, 4, 5),..."
+// Extracts up to |max_num_areas| 5-tuples (x1, y1, x2, y2, weight) from
+// |str| into |pAreas| and reports the count via |num_areas_found|.
+// Returns 0 on success, -1 on any formatting error.
+// Fixes vs. original: rejects NULL input, and bounds-checks the copy of each
+// "(...)" group into the fixed 32-byte scratch buffer (the original loop
+// could overflow area_str on long groups).
+static int parseCameraAreaString(const char* str, int max_num_areas,
+ camera_area_t *pAreas, int *num_areas_found)
+{
+ char area_str[32];
+ const char *start, *end, *p;
+ int values[5], index=0;
+ *num_areas_found = 0;
+
+ if(str == NULL) {
+ LOGE("%s: error: NULL area string", __func__);
+ return -1;
+ }
+ start = str; end = NULL;
+
+ while(start != NULL) {
+ if(*start != '(') {
+ LOGE("%s: error: Ill formatted area string: %s", __func__, str);
+ return -1;
+ }
+ end = strchr(start, ')');
+ if(end == NULL) {
+ LOGE("%s: error: Ill formatted area string: %s", __func__, str);
+ return -1;
+ }
+ // Copy one "(...)" group into the scratch buffer, leaving room for
+ // the terminating NUL.
+ if((end - start) + 1 >= (int)sizeof(area_str)) {
+ LOGE("%s: error: area token too long in string: %s", __func__, str);
+ return -1;
+ }
+ int i;
+ for (i=0,p=start; p<=end; p++, i++) {
+ area_str[i] = *p;
+ }
+ area_str[i] = '\0';
+ if(parseNDimVector(area_str, values, 5) < 0){
+ LOGE("%s: error: Failed to parse the area string: %s", __func__, area_str);
+ return -1;
+ }
+ // no more areas than max_num_areas are accepted.
+ if(index >= max_num_areas) {
+ LOGE("%s: error: too many areas specified %s", __func__, str);
+ return -1;
+ }
+ pAreas[index].x1 = values[0];
+ pAreas[index].y1 = values[1];
+ pAreas[index].x2 = values[2];
+ pAreas[index].y2 = values[3];
+ pAreas[index].weight = values[4];
+
+ index++;
+ start = strchr(end, '('); // search for next '('
+ }
+ (*num_areas_found) = index;
+ return 0;
+}
+// Validates parsed camera areas against the Android area contract:
+// coordinates in [-1000, 1000], left < right, top < bottom, weight in
+// [1, 1000]. The all-zero tuple (0,0,0,0,0) is the documented "no area"
+// sentinel and is accepted as-is. Returns true iff every area is valid.
+static bool validateCameraAreas(camera_area_t *areas, int num_areas)
+{
+ for(int i=0; i<num_areas; i++) {
+
+ // handle special case (0, 0, 0, 0, 0)
+ if((areas[i].x1 == 0) && (areas[i].y1 == 0)
+ && (areas[i].x2 == 0) && (areas[i].y2 == 0) && (areas[i].weight == 0)) {
+ continue;
+ }
+ if(areas[i].x1 < -1000) return false; // left should be >= -1000
+ if(areas[i].y1 < -1000) return false; // top should be >= -1000
+ if(areas[i].x2 > 1000) return false; // right should be <= 1000
+ if(areas[i].y2 > 1000) return false; // bottom should be <= 1000
+ if(areas[i].weight <= 0 || areas[i].weight > 1000) // weight should be in [1, 1000]
+ return false;
+ if(areas[i].x1 >= areas[i].x2) { // left should be < right
+ return false;
+ }
+ if(areas[i].y1 >= areas[i].y2) // top should be < bottom
+ return false;
+ }
+ return true;
+}
+
+// Parses KEY_FOCUS_AREAS from |params|, validates the rectangles, and
+// programs the first area as the AF ROI (the multi-ROI path is disabled in
+// the #if 0 block below). Returns NO_ERROR when focus areas are unsupported,
+// the string is absent, preview is stopped, or the ROI was applied;
+// BAD_VALUE on parse/validation/driver failure.
+// Fixes vs. original: |areas| is allocated with new[], so every release must
+// use delete[] (plain delete on an array is undefined behavior); also
+// removed a stray comment terminator inside the disabled multi-ROI branch.
+status_t QCameraHardwareInterface::setFocusAreas(const CameraParameters& params)
+{
+ LOGE("%s: E", __func__);
+ status_t rc;
+ int max_num_af_areas = mParameters.getInt(CameraParameters::KEY_MAX_NUM_FOCUS_AREAS);
+ if(max_num_af_areas == 0) {
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_FOCUS_AREAS);
+ if (str == NULL) {
+ LOGE("%s: Parameter string is null", __func__);
+ rc = NO_ERROR;
+ } else {
+ camera_area_t *areas = new camera_area_t[max_num_af_areas];
+ int num_areas_found=0;
+ if(parseCameraAreaString(str, max_num_af_areas, areas, &num_areas_found) < 0) {
+ LOGE("%s: Failed to parse the string: %s", __func__, str);
+ delete[] areas;
+ return BAD_VALUE;
+ }
+ for(int i=0; i<num_areas_found; i++) {
+ LOGD("FocusArea[%d] = (%d, %d, %d, %d, %d)", i, (areas[i].x1), (areas[i].y1),
+ (areas[i].x2), (areas[i].y2), (areas[i].weight));
+ }
+ if(validateCameraAreas(areas, num_areas_found) == false) {
+ LOGE("%s: invalid areas specified : %s", __func__, str);
+ delete[] areas;
+ return BAD_VALUE;
+ }
+ mParameters.set(CameraParameters::KEY_FOCUS_AREAS, str);
+ num_areas_found = 1; //temp; need to change after the multi-roi is enabled
+
+ //if the native_set_parms is called when preview is not started, it
+ //crashes in lower layer, so return if preview is not started
+ if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED) {
+ delete[] areas;
+ return NO_ERROR;
+ }
+
+ //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+ //so no action is taken by the lower layer
+ if(num_areas_found == 1 && (areas[0].x1 == 0) && (areas[0].y1 == 0)
+ && (areas[0].x2 == 0) && (areas[0].y2 == 0) && (areas[0].weight == 0)) {
+ num_areas_found = 0;
+ }
+#if 1 //temp solution
+
+ roi_info_t af_roi_value;
+ memset(&af_roi_value, 0, sizeof(roi_info_t));
+ uint16_t x1, x2, y1, y2, dx, dy;
+ int previewWidth, previewHeight;
+ this->getPreviewSize(&previewWidth, &previewHeight);
+ //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+ x1 = (uint16_t)((areas[0].x1 + 1000.0f)*(previewWidth/2000.0f));
+ y1 = (uint16_t)((areas[0].y1 + 1000.0f)*(previewHeight/2000.0f));
+ x2 = (uint16_t)((areas[0].x2 + 1000.0f)*(previewWidth/2000.0f));
+ y2 = (uint16_t)((areas[0].y2 + 1000.0f)*(previewHeight/2000.0f));
+ dx = x2 - x1;
+ dy = y2 - y1;
+
+ af_roi_value.num_roi = num_areas_found;
+ af_roi_value.roi[0].x = x1;
+ af_roi_value.roi[0].y = y1;
+ af_roi_value.roi[0].dx = dx;
+ af_roi_value.roi[0].dy = dy;
+ af_roi_value.is_multiwindow = 0;
+ if (native_set_parms(MM_CAMERA_PARM_AF_ROI, sizeof(roi_info_t), (void*)&af_roi_value))
+ rc = NO_ERROR;
+ else
+ rc = BAD_VALUE;
+ delete[] areas;
+#endif
+#if 0 //better solution with multi-roi, to be enabled later
+ af_mtr_area_t afArea;
+ afArea.num_area = num_areas_found;
+
+ uint16_t x1, x2, y1, y2, dx, dy;
+ int previewWidth, previewHeight;
+ this->getPreviewSize(&previewWidth, &previewHeight);
+
+ for(int i=0; i<num_areas_found; i++) {
+ //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+ x1 = (uint16_t)((areas[i].x1 + 1000.0f)*(previewWidth/2000.0f));
+ y1 = (uint16_t)((areas[i].y1 + 1000.0f)*(previewHeight/2000.0f));
+ x2 = (uint16_t)((areas[i].x2 + 1000.0f)*(previewWidth/2000.0f));
+ y2 = (uint16_t)((areas[i].y2 + 1000.0f)*(previewHeight/2000.0f));
+ dx = x2 - x1;
+ dy = y2 - y1;
+ afArea.mtr_area[i].x = x1;
+ afArea.mtr_area[i].y = y1;
+ afArea.mtr_area[i].dx = dx;
+ afArea.mtr_area[i].dy = dy;
+ afArea.weight[i] = areas[i].weight;
+ }
+
+ if(native_set_parms(MM_CAMERA_PARM_AF_MTR_AREA, sizeof(af_mtr_area_t), (void*)&afArea))
+ rc = NO_ERROR;
+ else
+ rc = BAD_VALUE;
+#endif
+ }
+ LOGE("%s: X", __func__);
+ return rc;
+}
+
+// Parses KEY_METERING_AREAS from |params|, validates the rectangles, and
+// programs the center of the first area as the AEC ROI (the multi-ROI path
+// is disabled in the #if 0 block below). Returns NO_ERROR when metering
+// areas are unsupported, the string is absent, preview is stopped, or the
+// ROI was applied; BAD_VALUE on parse/validation/driver failure.
+// Fix vs. original: |areas| is allocated with new[], so every release must
+// use delete[] (plain delete on an array is undefined behavior).
+status_t QCameraHardwareInterface::setMeteringAreas(const CameraParameters& params)
+{
+ LOGE("%s: E", __func__);
+ status_t rc;
+ int max_num_mtr_areas = mParameters.getInt(CameraParameters::KEY_MAX_NUM_METERING_AREAS);
+ if(max_num_mtr_areas == 0) {
+ return NO_ERROR;
+ }
+
+ const char *str = params.get(CameraParameters::KEY_METERING_AREAS);
+ if (str == NULL) {
+ LOGE("%s: Parameter string is null", __func__);
+ rc = NO_ERROR;
+ } else {
+ camera_area_t *areas = new camera_area_t[max_num_mtr_areas];
+ int num_areas_found=0;
+ if(parseCameraAreaString(str, max_num_mtr_areas, areas, &num_areas_found) < 0) {
+ LOGE("%s: Failed to parse the string: %s", __func__, str);
+ delete[] areas;
+ return BAD_VALUE;
+ }
+ for(int i=0; i<num_areas_found; i++) {
+ LOGD("MeteringArea[%d] = (%d, %d, %d, %d, %d)", i, (areas[i].x1), (areas[i].y1),
+ (areas[i].x2), (areas[i].y2), (areas[i].weight));
+ }
+ if(validateCameraAreas(areas, num_areas_found) == false) {
+ LOGE("%s: invalid areas specified : %s", __func__, str);
+ delete[] areas;
+ return BAD_VALUE;
+ }
+ mParameters.set(CameraParameters::KEY_METERING_AREAS, str);
+
+ //if the native_set_parms is called when preview is not started, it
+ //crashes in lower layer, so return if preview is not started
+ if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED) {
+ delete[] areas;
+ return NO_ERROR;
+ }
+
+ num_areas_found = 1; //temp; need to change after the multi-roi is enabled
+
+ //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+ //so no action is taken by the lower layer
+ if(num_areas_found == 1 && (areas[0].x1 == 0) && (areas[0].y1 == 0)
+ && (areas[0].x2 == 0) && (areas[0].y2 == 0) && (areas[0].weight == 0)) {
+ num_areas_found = 0;
+ }
+#if 1
+ cam_set_aec_roi_t aec_roi_value;
+ uint16_t x1, x2, y1, y2;
+ int previewWidth, previewHeight;
+ this->getPreviewSize(&previewWidth, &previewHeight);
+ //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+ x1 = (uint16_t)((areas[0].x1 + 1000.0f)*(previewWidth/2000.0f));
+ y1 = (uint16_t)((areas[0].y1 + 1000.0f)*(previewHeight/2000.0f));
+ x2 = (uint16_t)((areas[0].x2 + 1000.0f)*(previewWidth/2000.0f));
+ y2 = (uint16_t)((areas[0].y2 + 1000.0f)*(previewHeight/2000.0f));
+ delete[] areas;
+
+ if(num_areas_found == 1) {
+ aec_roi_value.aec_roi_enable = AEC_ROI_ON;
+ aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+ aec_roi_value.aec_roi_position.coordinate.x = (x1+x2)/2;
+ aec_roi_value.aec_roi_position.coordinate.y = (y1+y2)/2;
+ } else {
+ aec_roi_value.aec_roi_enable = AEC_ROI_OFF;
+ aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+ aec_roi_value.aec_roi_position.coordinate.x = DONT_CARE_COORDINATE;
+ aec_roi_value.aec_roi_position.coordinate.y = DONT_CARE_COORDINATE;
+ }
+
+ if(native_set_parms(MM_CAMERA_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)&aec_roi_value))
+ rc = NO_ERROR;
+ else
+ rc = BAD_VALUE;
+#endif
+#if 0 //solution including multi-roi, to be enabled later
+ aec_mtr_area_t aecArea;
+ aecArea.num_area = num_areas_found;
+
+ uint16_t x1, x2, y1, y2, dx, dy;
+ int previewWidth, previewHeight;
+ this->getPreviewSize(&previewWidth, &previewHeight);
+
+ for(int i=0; i<num_areas_found; i++) {
+ //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+ x1 = (uint16_t)((areas[i].x1 + 1000.0f)*(previewWidth/2000.0f));
+ y1 = (uint16_t)((areas[i].y1 + 1000.0f)*(previewHeight/2000.0f));
+ x2 = (uint16_t)((areas[i].x2 + 1000.0f)*(previewWidth/2000.0f));
+ y2 = (uint16_t)((areas[i].y2 + 1000.0f)*(previewHeight/2000.0f));
+ dx = x2 - x1;
+ dy = y2 - y1;
+ aecArea.mtr_area[i].x = x1;
+ aecArea.mtr_area[i].y = y1;
+ aecArea.mtr_area[i].dx = dx;
+ aecArea.mtr_area[i].dy = dy;
+ aecArea.weight[i] = areas[i].weight;
+ }
+ delete[] areas;
+
+ if(native_set_parms(MM_CAMERA_PARM_AEC_MTR_AREA, sizeof(aec_mtr_area_t), (void*)&aecArea))
+ rc = NO_ERROR;
+ else
+ rc = BAD_VALUE;
+#endif
+ }
+ LOGE("%s: X", __func__);
+ return rc;
+}
+
+// Applies KEY_FOCUS_MODE from |params|: caches the mode, refreshes the focus
+// distances, and (when AF is supported) programs the focus mode plus the
+// continuous-AF flag in the driver. Returns NO_ERROR on success,
+// UNKNOWN_ERROR if focus distances cannot be refreshed, BAD_VALUE for a
+// missing/unknown mode string.
+status_t QCameraHardwareInterface::setFocusMode(const CameraParameters& params)
+{
+ const char *str = params.get(CameraParameters::KEY_FOCUS_MODE);
+ const char *prev_str = mParameters.get(CameraParameters::KEY_FOCUS_MODE);
+ LOGE("%s",__func__);
+ if (str != NULL) {
+ LOGE("Focus mode %s",str);
+ int32_t value = attr_lookup(focus_modes,
+ sizeof(focus_modes) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(CameraParameters::KEY_FOCUS_MODE, str);
+ mFocusMode = value;
+
+ if(updateFocusDistances() != NO_ERROR) {
+ LOGE("%s: updateFocusDistances failed for %s", __FUNCTION__, str);
+ return UNKNOWN_ERROR;
+ }
+ mParameters.set(CameraParameters::KEY_FOCUS_DISTANCES, mFocusDistance.string());
+ if(mHasAutoFocusSupport){
+ // NOTE(review): the results of both native_set_parms calls below
+ // are discarded -- NO_ERROR is returned even if the driver fails.
+ bool ret = native_set_parms(MM_CAMERA_PARM_FOCUS_MODE,
+ sizeof(value),
+ (void *)&value);
+
+ int cafSupport = FALSE;
+ if(!strcmp(str, CameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) ||
+ !strcmp(str, CameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)){
+ cafSupport = TRUE;
+ }
+ LOGE("Continuous Auto Focus %d", cafSupport);
+ ret = native_set_parms(MM_CAMERA_PARM_CONTINUOUS_AF, sizeof(cafSupport),
+ (void *)&cafSupport);
+ }
+
+ return NO_ERROR;
+ }
+ LOGE("%s:Could not look up str value",__func__);
+ }
+ LOGE("Invalid focus mode value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Applies KEY_SCENE_MODE ("bestshot" mode) from |params| to the driver and
+// may flag a preview restart when the driver requests a bestshot
+// reconfigure and the mode actually changed. Returns NO_ERROR when
+// unsupported or applied, UNKNOWN_ERROR on driver failure, BAD_VALUE for a
+// missing/unknown mode string.
+status_t QCameraHardwareInterface::setSceneMode(const CameraParameters& params)
+{
+ status_t rc = NO_ERROR;
+ LOGE("%s",__func__);
+
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_BESTSHOT_MODE);
+ if(!rc) {
+ LOGE("%s:Parameter Scenemode is not supported for this sensor", __func__);
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_SCENE_MODE);
+ LOGE("Scene Mode string : %s",str);
+
+ if (str != NULL) {
+ int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+ LOGE("Setting Scenemode value = %d",value );
+ if (value != NOT_FOUND) {
+ mParameters.set(CameraParameters::KEY_SCENE_MODE, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_BESTSHOT_MODE, sizeof(value),
+ (void *)&value);
+ int bestshot_reconfigure;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+ &bestshot_reconfigure);
+ if(bestshot_reconfigure) {
+ if (mBestShotMode != value) {
+ mBestShotMode = value;
+ // Restart preview so the new scene mode takes effect in ZSL mode.
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+ mRestartPreview = 1;
+ pausePreviewForZSL();
+ }
+ }
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ LOGE("Invalid scenemode value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Applies KEY_SELECTABLE_ZONE_AF from |params| when auto-focus is supported;
+// a no-op (NO_ERROR) otherwise. Returns NO_ERROR when the focus-rect
+// parameter is unsupported or applied, UNKNOWN_ERROR on driver failure,
+// BAD_VALUE for a missing/unknown string.
+status_t QCameraHardwareInterface::setSelectableZoneAf(const CameraParameters& params)
+{
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ if(mHasAutoFocusSupport) {
+ const char *str = params.get(CameraParameters::KEY_SELECTABLE_ZONE_AF);
+ if (str != NULL) {
+ int32_t value = attr_lookup(selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FOCUS_RECT);
+ if(!rc) {
+ LOGE("SelectableZoneAF is not supported for this sensor");
+ return NO_ERROR;
+ }else {
+ mParameters.set(CameraParameters::KEY_SELECTABLE_ZONE_AF, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_FOCUS_RECT, sizeof(value),
+ (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ }
+ LOGE("Invalid selectable zone af value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+
+ }
+ return NO_ERROR;
+}
+
+// Applies KEY_EFFECT from |params| to the driver and may flag a preview
+// restart when the driver requests a bestshot reconfigure and the effect
+// changed. Returns NO_ERROR when the effect is unsupported or applied,
+// UNKNOWN_ERROR on driver failure, BAD_VALUE for a missing/unknown string.
+status_t QCameraHardwareInterface::setEffect(const CameraParameters& params)
+{
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ const char *str = params.get(CameraParameters::KEY_EFFECT);
+ int result;
+ if (str != NULL) {
+ LOGE("Setting effect %s",str);
+ int32_t value = attr_lookup(effects, sizeof(effects) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EFFECT);
+ if(!rc) {
+ LOGE("Camera Effect - %s mode is not supported for this sensor",str);
+ return NO_ERROR;
+ }else {
+ mParameters.set(CameraParameters::KEY_EFFECT, str);
+ LOGE("Setting effect to lower HAL : %d",value);
+ bool ret = native_set_parms(MM_CAMERA_PARM_EFFECT, sizeof(value),
+ (void *)&value,(int *)&result);
+ if(result != MM_CAMERA_OK) {
+ LOGI("Camera Effect: %s is not set as the selected value is not supported ", str);
+ }
+ int bestshot_reconfigure;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+ &bestshot_reconfigure);
+ if(bestshot_reconfigure) {
+ if (mEffects != value) {
+ mEffects = value;
+ // Restart preview so the new effect takes effect in ZSL mode.
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+ mRestartPreview = 1;
+ pausePreviewForZSL();
+ }
+ }
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ }
+ LOGE("Invalid effect value: %s", (str == NULL) ? "NULL" : str);
+ LOGE("setEffect X");
+ return BAD_VALUE;
+}
+
+// Applies the "luma-adaptation" (brightness) value from |params| to the
+// driver, skipping the driver call when the value is unchanged.
+// NOTE(review): the value is not range-checked, and CameraParameters::getInt
+// presumably returns -1 when the key is absent -- confirm the driver
+// tolerates both.
+status_t QCameraHardwareInterface::setBrightness(const CameraParameters& params) {
+
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_BRIGHTNESS);
+ if(!rc) {
+ LOGE("MM_CAMERA_PARM_BRIGHTNESS mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ int brightness = params.getInt("luma-adaptation");
+ if (mBrightness != brightness) {
+ LOGV(" new brightness value : %d ", brightness);
+ mBrightness = brightness;
+ mParameters.set("luma-adaptation", brightness);
+ bool ret = native_set_parms(MM_CAMERA_PARM_BRIGHTNESS, sizeof(mBrightness),
+ (void *)&mBrightness);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+
+ return NO_ERROR;
+}
+
+// Applies KEY_AUTO_EXPOSURE (metering mode) from |params| to the driver.
+// Returns NO_ERROR when unsupported or applied, UNKNOWN_ERROR on driver
+// failure, BAD_VALUE for a missing/unknown string.
+status_t QCameraHardwareInterface::setAutoExposure(const CameraParameters& params)
+{
+
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EXPOSURE);
+ if(!rc) {
+ LOGE("MM_CAMERA_PARM_EXPOSURE mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_AUTO_EXPOSURE);
+ if (str != NULL) {
+ int32_t value = attr_lookup(autoexposure, sizeof(autoexposure) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_EXPOSURE, sizeof(value),
+ (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ LOGE("Invalid auto exposure value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Applies KEY_EXPOSURE_COMPENSATION from |params|. The driver expects the
+// numerator in the high 16 bits and the fixed denominator in the low 16
+// bits of a single 32-bit value. Returns NO_ERROR when unsupported or
+// applied, UNKNOWN_ERROR on driver failure, BAD_VALUE when out of range.
+status_t QCameraHardwareInterface::setExposureCompensation(
+ const CameraParameters & params){
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EXPOSURE_COMPENSATION);
+ if(!rc) {
+ LOGE("MM_CAMERA_PARM_EXPOSURE_COMPENSATION mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ int numerator = params.getInt(CameraParameters::KEY_EXPOSURE_COMPENSATION);
+ if(EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR <= numerator &&
+ numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+ int16_t numerator16 = (int16_t)(numerator & 0x0000ffff);
+ uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+ uint32_t value = 0;
+ // Pack numerator:denominator into one word for the driver.
+ value = numerator16 << 16 | denominator16;
+
+ mParameters.set(CameraParameters::KEY_EXPOSURE_COMPENSATION,
+ numerator);
+ bool ret = native_set_parms(MM_CAMERA_PARM_EXPOSURE_COMPENSATION,
+ sizeof(value), (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ LOGE("Invalid Exposure Compensation");
+ return BAD_VALUE;
+}
+
+// Applies KEY_WHITE_BALANCE from |params| to the driver.
+// Returns NO_ERROR when unsupported or applied, UNKNOWN_ERROR on driver
+// failure, BAD_VALUE for a missing/unknown string.
+status_t QCameraHardwareInterface::setWhiteBalance(const CameraParameters& params)
+{
+
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_WHITE_BALANCE);
+ if(!rc) {
+ LOGE("MM_CAMERA_PARM_WHITE_BALANCE mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ int result;
+
+ const char *str = params.get(CameraParameters::KEY_WHITE_BALANCE);
+ if (str != NULL) {
+ int32_t value = attr_lookup(whitebalance, sizeof(whitebalance) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(CameraParameters::KEY_WHITE_BALANCE, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_WHITE_BALANCE, sizeof(value),
+ (void *)&value, (int *)&result);
+ if(result != MM_CAMERA_OK) {
+ LOGI("WhiteBalance Value: %s is not set as the selected value is not supported ", str);
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ LOGE("Invalid whitebalance value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+// Applies KEY_ANTIBANDING from |params| to the driver.
+// Returns NO_ERROR when unsupported or applied, UNKNOWN_ERROR on driver
+// failure, BAD_VALUE for a missing/unknown string.
+status_t QCameraHardwareInterface::setAntibanding(const CameraParameters& params)
+{
+ int result;
+
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ANTIBANDING);
+ if(!rc) {
+ LOGE("ANTIBANDING mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_ANTIBANDING);
+ if (str != NULL) {
+ int value = (camera_antibanding_type)attr_lookup(
+ antibanding, sizeof(antibanding) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ camera_antibanding_type temp = (camera_antibanding_type) value;
+ LOGE("Antibanding Value : %d",value);
+ mParameters.set(CameraParameters::KEY_ANTIBANDING, str);
+ // NOTE(review): |temp| is unused; the raw int |value| is what is
+ // actually passed to the driver below.
+ bool ret = native_set_parms(MM_CAMERA_PARM_ANTIBANDING,
+ sizeof(camera_antibanding_type), (void *)&value ,(int *)&result);
+ if(result != MM_CAMERA_OK) {
+ LOGI("AntiBanding Value: %s is not supported for the given BestShot Mode", str);
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ LOGE("Invalid antibanding value: %s", (str == NULL) ? "NULL" : str);
+
+ return BAD_VALUE;
+}
+
+// Caches the requested preview frame rate in mParameters. The rate is not
+// validated and is not sent to the driver here. Always returns NO_ERROR.
+status_t QCameraHardwareInterface::setPreviewFrameRate(const CameraParameters& params)
+{
+ LOGE("%s: E",__func__);
+ status_t rc = NO_ERROR;
+ uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+ LOGV("%s: requested preview frame rate is %d", __func__, fps);
+
+ mParameters.setPreviewFrameRate(fps);
+ LOGE("%s: X",__func__);
+ return NO_ERROR;
+}
+
+// Applies the preview frame-rate mode (auto/fixed) and, on a mode change,
+// re-applies the fps value to the driver. No-op (NO_ERROR) when the driver
+// does not support FPS/FPS_MODE, when either mode string is NULL, or when
+// the mode is unchanged after initialization.
+status_t QCameraHardwareInterface::setPreviewFrameRateMode(const CameraParameters& params) {
+
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS);
+ if(!rc) {
+ LOGE(" CAMERA FPS mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS_MODE);
+ if(!rc) {
+ LOGE("CAMERA FPS MODE mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+
+ const char *previousMode = mParameters.getPreviewFrameRateMode();
+ const char *str = params.getPreviewFrameRateMode();
+ if (NULL == previousMode) {
+ LOGE("Preview Frame Rate Mode is NULL\n");
+ return NO_ERROR;
+ }
+ if (NULL == str) {
+ LOGE("Preview Frame Rate Mode is NULL\n");
+ return NO_ERROR;
+ }
+ if( mInitialized && !strcmp(previousMode, str)) {
+ LOGE("frame rate mode same as previous mode %s", previousMode);
+ return NO_ERROR;
+ }
+ int32_t frameRateMode = attr_lookup(frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map),str);
+ if(frameRateMode != NOT_FOUND) {
+ LOGV("setPreviewFrameRateMode: %s ", str);
+ mParameters.setPreviewFrameRateMode(str);
+ // NOTE(review): on driver failure this returns the bool |ret| (false)
+ // as a status_t, which presumably equals NO_ERROR (0) -- the failure
+ // is therefore invisible to the caller; verify intent.
+ bool ret = native_set_parms(MM_CAMERA_PARM_FPS_MODE, sizeof(frameRateMode), (void *)&frameRateMode);
+ if(!ret) return ret;
+ //set the fps value when changing modes
+ // NOTE(review): fps is cast to uint16_t then stored in int16_t -- values
+ // above 32767 would wrap; harmless for realistic frame rates.
+ int16_t fps = (uint16_t)params.getPreviewFrameRate();
+ if(MINIMUM_FPS <= fps && fps <=MAXIMUM_FPS){
+ mParameters.setPreviewFrameRate(fps);
+ ret = native_set_parms(MM_CAMERA_PARM_FPS,
+ sizeof(fps), (void *)&fps);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ LOGE("Invalid preview frame rate value: %d", fps);
+ return BAD_VALUE;
+ }
+ LOGE("Invalid preview frame rate mode value: %s", (str == NULL) ? "NULL" : str);
+
+ return BAD_VALUE;
+}
+
+// Apply the "skinToneEnhancement" (SCE) factor from the app parameters.
+// No-op (NO_ERROR) when the sensor does not support MM_CAMERA_PARM_SCE_FACTOR
+// or when the value is unchanged from the cached mSkinToneEnhancement.
+status_t QCameraHardwareInterface::setSkinToneEnhancement(const CameraParameters& params) {
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SCE_FACTOR);
+ if(!rc) {
+ LOGE("SkinToneEnhancement is not supported for this sensor");
+ return NO_ERROR;
+ }
+ // NOTE(review): getInt returns a sentinel when the key is absent; that value
+ // would be cached and forwarded unchecked -- confirm callers always set it.
+ int skinToneValue = params.getInt("skinToneEnhancement");
+ if (mSkinToneEnhancement != skinToneValue) {
+ LOGV(" new skinTone correction value : %d ", skinToneValue);
+ mSkinToneEnhancement = skinToneValue;
+ mParameters.set("skinToneEnhancement", skinToneValue);
+ bool ret = native_set_parms(MM_CAMERA_PARM_SCE_FACTOR, sizeof(mSkinToneEnhancement),
+ (void *)&mSkinToneEnhancement);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+// Enable/disable wavelet denoise. The denoise enable flag comes from the
+// KEY_DENOISE parameter; the plate-processing mode comes from the
+// "persist.denoise.process.plates" system property.
+status_t QCameraHardwareInterface::setWaveletDenoise(const CameraParameters& params) {
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_WAVELET_DENOISE);
+ if(rc != MM_CAMERA_PARM_SUPPORT_SET) {
+ LOGE("Wavelet Denoise is not supported for this sensor");
+ /* TO DO */
+// return NO_ERROR;
+ }
+ // NOTE(review): the early return above is deliberately commented out, so the
+ // parameter is pushed even when the support query fails -- confirm intended.
+ const char *str = params.get(CameraParameters::KEY_DENOISE);
+ if (str != NULL) {
+ int value = attr_lookup(denoise,
+ sizeof(denoise) / sizeof(str_map), str);
+ if ((value != NOT_FOUND) && (mDenoiseValue != value)) {
+ mDenoiseValue = value;
+ mParameters.set(CameraParameters::KEY_DENOISE, str);
+
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.denoise.process.plates", prop, "0");
+
+ denoise_param_t temp;
+ memset(&temp, 0, sizeof(denoise_param_t));
+ temp.denoise_enable = value;
+ temp.process_plates = atoi(prop);
+ LOGE("Denoise enable=%d, plates=%d", temp.denoise_enable, temp.process_plates);
+ bool ret = native_set_parms(MM_CAMERA_PARM_WAVELET_DENOISE, sizeof(temp),
+ (void *)&temp);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ // Known value but unchanged: nothing to push.
+ return NO_ERROR;
+ }
+ LOGE("Invalid Denoise value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Resolve the record (video) dimensions and keep the preview dimensions
+// consistent with them: preview may never exceed video size (VFE output1
+// must not be larger than output2), and in 3D mode preview must equal video.
+// Results are written into videoWidth/videoHeight, mPreview{Width,Height}
+// and the mDimension config that is later sent to the backend.
+status_t QCameraHardwareInterface::setVideoSize(const CameraParameters& params)
+{
+ const char *str= NULL;
+ LOGE("%s: E", __func__);
+ str = params.get(CameraParameters::KEY_VIDEO_SIZE);
+ if(!str) {
+ mParameters.set(CameraParameters::KEY_VIDEO_SIZE, "");
+ //If application didn't set this parameter string, use the values from
+ //getPreviewSize() as video dimensions.
+ LOGE("No Record Size requested, use the preview dimensions");
+ videoWidth = mPreviewWidth;
+ videoHeight = mPreviewHeight;
+ } else {
+ //Extract the record width and height that application requested.
+ LOGI("%s: requested record size %s", __func__, str);
+ if(!parse_size(str, videoWidth, videoHeight)) {
+ mParameters.set(CameraParameters::KEY_VIDEO_SIZE, str);
+ //VFE output1 shouldn't be greater than VFE output2.
+ if( (mPreviewWidth > videoWidth) || (mPreviewHeight > videoHeight)) {
+ //Set preview sizes as record sizes.
+ LOGE("Preview size %dx%d is greater than record size %dx%d,\
+ resetting preview size to record size",mPreviewWidth,
+ mPreviewHeight, videoWidth, videoHeight);
+ mPreviewWidth = videoWidth;
+ mPreviewHeight = videoHeight;
+ mParameters.setPreviewSize(mPreviewWidth, mPreviewHeight);
+ }
+
+ if(mIs3DModeOn == true) {
+ /* As preview and video frames are same in 3D mode,
+ * preview size should be same as video size. This
+ * change is needed to take care of video resolutions
+ * like 720P and 1080p where the application can
+ * request different preview sizes like 768x432
+ */
+ LOGE("3D mod is on");
+ mPreviewWidth = videoWidth;
+ mPreviewHeight = videoHeight;
+ mParameters.setPreviewSize(mPreviewWidth, mPreviewHeight);
+ }
+ } else {
+ // Unparseable size string: clear the stored value and reject.
+ mParameters.set(CameraParameters::KEY_VIDEO_SIZE, "");
+ LOGE("%s: error :failed to parse parameter record-size (%s)", __func__, str);
+ return BAD_VALUE;
+ }
+ }
+ LOGE("%s: preview dimensions: %dx%d", __func__, mPreviewWidth, mPreviewHeight);
+ LOGE("%s: video dimensions: %dx%d", __func__, videoWidth, videoHeight);
+ mDimension.display_width = mPreviewWidth;
+ mDimension.display_height= mPreviewHeight;
+ mDimension.orig_video_width = videoWidth;
+ mDimension.orig_video_height = videoHeight;
+ mDimension.video_width = videoWidth;
+ mDimension.video_height = videoHeight;
+
+ LOGE("%s: X", __func__);
+ return NO_ERROR;
+}
+
+// Toggle the ZSL (zero-shutter-lag) bit of myMode from KEY_CAMERA_MODE:
+// value 1 sets CAMERA_ZSL_MODE, anything else clears it.
+// NOTE(review): getInt's not-found sentinel also lands in the "clear" branch
+// and is stored back into mParameters -- confirm that is acceptable.
+status_t QCameraHardwareInterface::setCameraMode(const CameraParameters& params) {
+ int32_t value = params.getInt(CameraParameters::KEY_CAMERA_MODE);
+ mParameters.set(CameraParameters::KEY_CAMERA_MODE,value);
+
+ LOGI("ZSL is enabled %d", value);
+ if (value == 1) {
+ myMode = (camera_mode_t)(myMode | CAMERA_ZSL_MODE);
+ } else {
+ myMode = (camera_mode_t)(myMode & ~CAMERA_ZSL_MODE);
+ }
+ return NO_ERROR;
+}
+
+// Apply low/normal power mode. Low power is forced whenever a high
+// frame-rate level above 1 is active (mHFRLevel > 1), regardless of the
+// requested KEY_POWER_MODE string. The resolved value is always pushed to
+// the backend, even when the key is absent (then value stays NORMAL_POWER).
+status_t QCameraHardwareInterface::setPowerMode(const CameraParameters& params) {
+ uint32_t value = NORMAL_POWER;
+ const char *powermode = NULL;
+
+ powermode = params.get(CameraParameters::KEY_POWER_MODE);
+ if (powermode != NULL) {
+ value = attr_lookup(power_modes,
+ sizeof(power_modes) / sizeof(str_map), powermode);
+ if((value == LOW_POWER) || mHFRLevel > 1) {
+ LOGI("Enable Low Power Mode");
+ value = LOW_POWER;
+ mPowerMode = value;
+ mParameters.set(CameraParameters::KEY_POWER_MODE,"Low_Power");
+ } else {
+ LOGE("Enable Normal Power Mode");
+ mPowerMode = value;
+ mParameters.set(CameraParameters::KEY_POWER_MODE,"Normal_Power");
+ }
+ }
+
+ LOGI("%s Low power mode %s value = %d", __func__,
+ value ? "Enabled" : "Disabled", value);
+ native_set_parms(MM_CAMERA_PARM_LOW_POWER_MODE, sizeof(value),
+ (void *)&value);
+ return NO_ERROR;
+}
+
+
+// Validate the requested preview size against the supported mPreviewSizes
+// table; on a match, cache it in mParameters/mPreviewWidth/mPreviewHeight
+// and the mDimension display fields. Unsupported sizes are rejected.
+status_t QCameraHardwareInterface::setPreviewSize(const CameraParameters& params)
+{
+ int width, height;
+ params.getPreviewSize(&width, &height);
+ LOGE("################requested preview size %d x %d", width, height);
+
+ // Validate the preview size
+ for (size_t i = 0; i < mPreviewSizeCount; ++i) {
+ if (width == mPreviewSizes[i].width
+ && height == mPreviewSizes[i].height) {
+ mParameters.setPreviewSize(width, height);
+ LOGE("setPreviewSize: width: %d heigh: %d", width, height);
+ mPreviewWidth = width;
+ mPreviewHeight = height;
+ mDimension.display_width = width;
+ mDimension.display_height = height;
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid preview size requested: %dx%d", width, height);
+ return BAD_VALUE;
+}
+// Validate and apply the app-requested preview FPS range. The (min,max)
+// pair must exactly match an entry in FpsRangesSupported. A min==max request
+// selects fixed frame-rate mode, otherwise auto mode; the packed fps word
+// (minFps in the high 16 bits, maxFps in the low 16, both divided by 1000)
+// is then pushed to the backend.
+status_t QCameraHardwareInterface::setPreviewFpsRange(const CameraParameters& params)
+{
+ LOGV("%s: E", __func__);
+ int minFps,maxFps;
+ int prevMinFps, prevMaxFps;
+ int rc = NO_ERROR;
+ bool found = false;
+
+ mParameters.getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+ LOGE("%s: Existing FpsRange Values:(%d, %d)", __func__, prevMinFps, prevMaxFps);
+ params.getPreviewFpsRange(&minFps,&maxFps);
+ LOGE("%s: Requested FpsRange Values:(%d, %d)", __func__, minFps, maxFps);
+
+ if(mInitialized && (minFps == prevMinFps && maxFps == prevMaxFps)) {
+ LOGE("%s: No change in FpsRange", __func__);
+ rc = NO_ERROR;
+ goto end;
+ }
+ for(size_t i=0; i<FPS_RANGES_SUPPORTED_COUNT; i++) {
+ // if the value is in the supported list
+ if(minFps==FpsRangesSupported[i].minFPS && maxFps == FpsRangesSupported[i].maxFPS){
+ found = true;
+ LOGE("FPS: i=%d : minFps = %d, maxFps = %d ",i,FpsRangesSupported[i].minFPS,FpsRangesSupported[i].maxFPS );
+ mParameters.setPreviewFpsRange(minFps,maxFps);
+ // validate the values
+ bool valid = true;
+ // FPS can not be negative
+ if(minFps < 0 || maxFps < 0) valid = false;
+ // minFps must not exceed maxFps
+ if(minFps > maxFps) valid = false;
+
+ if(valid) {
+ //Set the FPS mode
+ const char *str = (minFps == maxFps) ?
+ CameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE:
+ CameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE;
+ LOGE("%s FPS_MODE = %s", __func__, str);
+ int32_t frameRateMode = attr_lookup(frame_rate_modes,
+ sizeof(frame_rate_modes) / sizeof(str_map),str);
+ bool ret;
+ ret = native_set_parms(MM_CAMERA_PARM_FPS_MODE, sizeof(int32_t),
+ (void *)&frameRateMode);
+
+ //set FPS values
+ uint32_t fps; //lower 2 bytes specify maxFps and higher 2 bytes specify minFps
+ fps = ((uint32_t)(minFps/1000) << 16) + ((uint16_t)(maxFps/1000));
+ ret = native_set_parms(MM_CAMERA_PARM_FPS, sizeof(uint32_t), (void *)&fps);
+ mParameters.setPreviewFpsRange(minFps, maxFps);
+ if(ret)
+ rc = NO_ERROR;
+ else {
+ rc = BAD_VALUE;
+ LOGE("%s: error: native_set_params failed", __func__);
+ }
+ } else {
+ LOGE("%s: error: invalid FPS range value", __func__);
+ rc = BAD_VALUE;
+ }
+ }
+ }
+ if(found == false){
+ LOGE("%s: error: FPS range value not supported", __func__);
+ rc = BAD_VALUE;
+ }
+end:
+ LOGV("%s: X", __func__);
+ return rc;
+}
+
+// Validate the requested JPEG thumbnail size against the
+// default_thumbnail_sizes table; on a match cache it in
+// thumbnailWidth/thumbnailHeight and mParameters, else reject.
+status_t QCameraHardwareInterface::setJpegThumbnailSize(const CameraParameters& params){
+ int width = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ int height = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+ LOGE("requested jpeg thumbnail size %d x %d", width, height);
+
+ // Validate the picture size
+ for (unsigned int i = 0; i < thumbnail_sizes_count; ++i) {
+ if (width == default_thumbnail_sizes[i].width
+ && height == default_thumbnail_sizes[i].height) {
+ thumbnailWidth = width;
+ thumbnailHeight = height;
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, width);
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, height);
+ return NO_ERROR;
+ }
+ }
+ LOGE("error: setting jpeg thumbnail size");
+ return BAD_VALUE;
+}
+// Apply the requested picture (snapshot) size. Sizes in the supported list
+// are accepted directly (flagging a preview restart if changed); other
+// dimensions are accepted only if isValidDimension() approves them.
+status_t QCameraHardwareInterface::setPictureSize(const CameraParameters& params)
+{
+ int width, height;
+ LOGE("QualcommCameraHardware::setPictureSize E");
+ params.getPictureSize(&width, &height);
+ LOGE("requested picture size %d x %d", width, height);
+
+ // Validate the picture size
+ for (int i = 0; i < mSupportedPictureSizesCount; ++i) {
+ if (width == mPictureSizesPtr[i].width
+ && height == mPictureSizesPtr[i].height) {
+ int old_width, old_height;
+ mParameters.getPictureSize(&old_width,&old_height);
+ if(width != old_width || height != old_height) {
+ // Size changed: preview pipeline must be restarted.
+ mRestartPreview = true;
+ }
+ mParameters.setPictureSize(width, height);
+ mDimension.picture_width = width;
+ mDimension.picture_height = height;
+ return NO_ERROR;
+ }
+ }
+ /* Dimension not among the ones in the list. Check if
+ * its a valid dimension, if it is, then configure the
+ * camera accordingly. else reject it.
+ */
+ if( isValidDimension(width, height) ) {
+ mParameters.setPictureSize(width, height);
+ mDimension.picture_width = width;
+ mDimension.picture_height = height;
+ return NO_ERROR;
+ } else
+ LOGE("Invalid picture size requested: %dx%d", width, height);
+ return BAD_VALUE;
+}
+
+// Forward the cached EXIF rotation (mRotation) to the JPEG encoder;
+// isZsl distinguishes the ZSL snapshot path.
+status_t QCameraHardwareInterface::setJpegRotation(int isZsl) {
+ return mm_jpeg_encoder_setRotation(mRotation, isZsl);
+}
+
+// Accessor for the cached JPEG rotation set via setRotation().
+int QCameraHardwareInterface::getJpegRotation(void) {
+ return mRotation;
+}
+
+// Translate the current KEY_ISO_MODE string into a numeric ISO speed via
+// the iso / iso_speed_values tables.
+// NOTE(review): if the string is absent from the iso table, attr_lookup's
+// not-found result is used as an index into iso_speed_values -- confirm
+// KEY_ISO_MODE is always validated (setIsoValue) before this is called.
+int QCameraHardwareInterface::getISOSpeedValue()
+{
+ const char *iso_str = mParameters.get(CameraParameters::KEY_ISO_MODE);
+ int iso_index = attr_lookup(iso, sizeof(iso) / sizeof(str_map), iso_str);
+ int iso_value = iso_speed_values[iso_index];
+ return iso_value;
+}
+
+
+// Validate and store JPEG quality and JPEG thumbnail quality (0..100 each).
+// Both values are checked independently; BAD_VALUE is returned if either is
+// out of range, but a valid one is still applied.
+status_t QCameraHardwareInterface::setJpegQuality(const CameraParameters& params) {
+ status_t rc = NO_ERROR;
+ int quality = params.getInt(CameraParameters::KEY_JPEG_QUALITY);
+ LOGE("setJpegQuality E");
+ if (quality >= 0 && quality <= 100) {
+ mParameters.set(CameraParameters::KEY_JPEG_QUALITY, quality);
+ mJpegQuality = quality;
+ } else {
+ LOGE("Invalid jpeg quality=%d", quality);
+ rc = BAD_VALUE;
+ }
+
+ quality = params.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+ if (quality >= 0 && quality <= 100) {
+ mParameters.set(CameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, quality);
+ } else {
+ LOGE("Invalid jpeg thumbnail quality=%d", quality);
+ rc = BAD_VALUE;
+ }
+ LOGE("setJpegQuality X");
+ return rc;
+}
+
+// Configure the number of snapshots per shutter press (burst count).
+// Non-positive counts are clamped to 1. A backend failure is only logged;
+// the function still returns NO_ERROR.
+status_t QCameraHardwareInterface::
+setNumOfSnapshot(const CameraParameters& params) {
+ status_t rc = NO_ERROR;
+
+ int num_of_snapshot = getNumOfSnapshots(params);
+
+ if (num_of_snapshot <= 0) {
+ num_of_snapshot = 1;
+ }
+ LOGI("number of snapshots = %d", num_of_snapshot);
+ mParameters.set("num-snaps-per-shutter", num_of_snapshot);
+
+ bool result = native_set_parms(MM_CAMERA_PARM_SNAPSHOT_BURST_NUM,
+ sizeof(int),
+ (void *)&num_of_snapshot);
+ if(!result)
+ LOGI("%s:Failure setting number of snapshots!!!", __func__);
+ return rc;
+}
+
+// Resolve the app's preview format string to a backend format descriptor
+// (mPreviewFormatInfo) and push it to the driver. Unknown strings fall back
+// to NV21 (with 4K padding when stream-textures are on) and return BAD_VALUE.
+status_t QCameraHardwareInterface::setPreviewFormat(const CameraParameters& params) {
+ const char *str = params.getPreviewFormat();
+ int32_t previewFormat = attr_lookup(preview_formats, sizeof(preview_formats) / sizeof(str_map), str);
+ if(previewFormat != NOT_FOUND) {
+ int num = sizeof(preview_format_info_list)/sizeof(preview_format_info_t);
+ int i;
+
+ for (i = 0; i < num; i++) {
+ if (preview_format_info_list[i].Hal_format == previewFormat) {
+ mPreviewFormatInfo = preview_format_info_list[i];
+ break;
+ }
+ }
+
+ if (i == num) {
+ // Known HAL format but no descriptor entry: default and reject.
+ mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+ mPreviewFormatInfo.padding = CAMERA_PAD_TO_WORD;
+ return BAD_VALUE;
+ }
+ bool ret = native_set_parms(MM_CAMERA_PARM_PREVIEW_FORMAT, sizeof(cam_format_t),
+ (void *)&mPreviewFormatInfo.mm_cam_format);
+ mParameters.set(CameraParameters::KEY_PREVIEW_FORMAT, str);
+ mPreviewFormat = mPreviewFormatInfo.mm_cam_format;
+ LOGI("Setting preview format to %d, i =%d, num=%d, hal_format=%d",
+ mPreviewFormat, i, num, mPreviewFormatInfo.Hal_format);
+ return NO_ERROR;
+ } else if ( strTexturesOn ) {
+ mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+ mPreviewFormatInfo.padding = CAMERA_PAD_TO_4K;
+ } else {
+ mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+ mPreviewFormatInfo.padding = CAMERA_PAD_TO_WORD;
+ }
+ // NOTE(review): both fallback branches above still report BAD_VALUE --
+ // confirm callers treat that as non-fatal.
+ LOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Handle the legacy "strtextures" parameter ("on"/"off", upper or lower
+// case). Enabling stream-textures disables the overlay path and vice versa.
+status_t QCameraHardwareInterface::setStrTextures(const CameraParameters& params) {
+    const char *str = params.get("strtextures");
+    const char *prev_str = mParameters.get("strtextures");
+
+    if(str != NULL) {
+        // BUGFIX: prev_str is NULL until the key has been stored once; the
+        // old code passed it straight to strcmp() and crashed. Treat a
+        // missing previous value as "changed".
+        if(prev_str != NULL && !strcmp(str, prev_str)) {
+            return NO_ERROR;
+        }
+        int str_size = strlen(str);
+        mParameters.set("strtextures", str);
+        if(str_size == 2) {
+            if(!strncmp(str, "on", str_size) || !strncmp(str, "ON", str_size)){
+                LOGI("Resetting mUseOverlay to false");
+                strTexturesOn = true;
+                mUseOverlay = false;
+            }
+        }else if(str_size == 3){
+            if (!strncmp(str, "off", str_size) || !strncmp(str, "OFF", str_size)) {
+                strTexturesOn = false;
+                mUseOverlay = true;
+            }
+        }
+
+    }
+    return NO_ERROR;
+}
+
+// Apply the flash (LED) mode from KEY_FLASH_MODE. No-op when the sensor
+// does not support MM_CAMERA_PARM_LED_MODE; BAD_VALUE for an unknown string.
+status_t QCameraHardwareInterface::setFlash(const CameraParameters& params)
+{
+ LOGI("%s: E",__func__);
+ int rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LED_MODE);
+ if(!rc) {
+ LOGE("%s:LED FLASH not supported", __func__);
+ return NO_ERROR;
+ }
+
+ const char *str = params.get(CameraParameters::KEY_FLASH_MODE);
+ if (str != NULL) {
+ int32_t value = attr_lookup(flash, sizeof(flash) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(CameraParameters::KEY_FLASH_MODE, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_LED_MODE,
+ sizeof(value), (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ LOGE("Invalid flash mode value: %s", (str == NULL) ? "NULL" : str);
+
+ return BAD_VALUE;
+}
+
+// Apply AE (auto-exposure) and AWB (auto-white-balance) lock flags.
+// Each lock is forwarded to the backend as int32 1 ("true") / 0 (anything
+// else). Returns the status of the last lock actually applied.
+status_t QCameraHardwareInterface::setAecAwbLock(const CameraParameters & params)
+{
+    LOGD("%s : E", __func__);
+    status_t rc = NO_ERROR;
+    int32_t value;
+    const char* str;
+
+    //for AEC lock
+    str = params.get(CameraParameters::KEY_AUTO_EXPOSURE_LOCK);
+    // BUGFIX: params.get() returns NULL when the key is absent; the old code
+    // passed NULL straight to strcmp() and crashed. Skip an unset lock key.
+    if (str != NULL) {
+        value = (strcmp(str, "true") == 0)? 1 : 0;
+        mParameters.set(CameraParameters::KEY_AUTO_EXPOSURE_LOCK, str);
+        rc = (native_set_parms(MM_CAMERA_PARM_AEC_LOCK, sizeof(int32_t), (void *)(&value))) ?
+                NO_ERROR : UNKNOWN_ERROR;
+    }
+
+    //for AWB lock
+    str = params.get(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
+    if (str != NULL) {
+        value = (strcmp(str, "true") == 0)? 1 : 0;
+        mParameters.set(CameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, str);
+        rc = (native_set_parms(MM_CAMERA_PARM_AWB_LOCK, sizeof(int32_t), (void *)(&value))) ?
+                NO_ERROR : UNKNOWN_ERROR;
+    }
+    LOGD("%s : X", __func__);
+    return rc;
+}
+
+// Publish the overlay pixel format: NV12 (YCbCr_420_SP) normally, or a
+// side-by-side 3D variant of NV21 when 3D mode is active.
+status_t QCameraHardwareInterface::setOverlayFormats(const CameraParameters& params)
+{
+ mParameters.set("overlay-format", HAL_PIXEL_FORMAT_YCbCr_420_SP);
+ if(mIs3DModeOn == true) {
+ int ovFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP|HAL_3D_IN_SIDE_BY_SIDE_L_R|HAL_3D_OUT_SIDE_BY_SIDE;
+ mParameters.set("overlay-format", ovFormat);
+ }
+ return NO_ERROR;
+}
+
+// Apply Memory Color Enhancement (MCE) on/off from the app parameters.
+// No-op when the sensor does not support MM_CAMERA_PARM_MCE.
+// NOTE(review): an invalid string is only logged and NO_ERROR is returned
+// (unlike most sibling setters, which return BAD_VALUE) -- confirm intended.
+status_t QCameraHardwareInterface::setMCEValue(const CameraParameters& params)
+{
+ LOGE("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_MCE);
+ if(!rc) {
+ LOGE("MM_CAMERA_PARM_MCE mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT);
+ if (str != NULL) {
+ int value = attr_lookup(mce, sizeof(mce) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ int temp = (int8_t)value;
+ LOGI("%s: setting MCE value of %s", __FUNCTION__, str);
+ mParameters.set(CameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT, str);
+
+ native_set_parms(MM_CAMERA_PARM_MCE, sizeof(int8_t), (void *)&temp);
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid MCE value: %s", (str == NULL) ? "NULL" : str);
+
+ return NO_ERROR;
+}
+
+// Apply the high-frame-rate (HFR) video level. A change while preview is
+// running restarts the preview (stop -> set -> start) so the new rate takes
+// effect; otherwise the level is pushed directly.
+status_t QCameraHardwareInterface::setHighFrameRate(const CameraParameters& params)
+{
+
+    bool mCameraRunning;
+
+    int rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_HFR);
+    if(!rc) {
+        LOGE("%s: MM_CAMERA_PARM_HFR not supported", __func__);
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(CameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if (str != NULL) {
+        int value = attr_lookup(hfr, sizeof(hfr) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mHFRLevel = (int32_t)value;
+            //Check for change in HFR value
+            const char *oldHfr = mParameters.get(CameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+            // BUGFIX: oldHfr is NULL before the key is first stored; the old
+            // code fed it straight to strcmp() and crashed. Treat NULL as
+            // "value changed".
+            if(oldHfr == NULL || strcmp(oldHfr, str)){
+                mParameters.set(CameraParameters::KEY_VIDEO_HIGH_FRAME_RATE, str);
+//                mHFRMode = true;
+                mCameraRunning=isPreviewRunning();
+                if(mCameraRunning == true) {
+//                    mHFRThreadWaitLock.lock();
+//                    pthread_attr_t pattr;
+//                    pthread_attr_init(&pattr);
+//                    pthread_attr_setdetachstate(&pattr, PTHREAD_CREATE_DETACHED);
+//                    mHFRThreadRunning = !pthread_create(&mHFRThread,
+//                                      &pattr,
+//                                      hfr_thread,
+//                                      (void*)NULL);
+//                    mHFRThreadWaitLock.unlock();
+                    stopPreviewInternal();
+                    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+                    native_set_parms(MM_CAMERA_PARM_HFR, sizeof(int32_t), (void *)&mHFRLevel);
+                    mPreviewState = QCAMERA_HAL_PREVIEW_START;
+                    if (startPreview2() == NO_ERROR)
+                        mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+                    return NO_ERROR;
+                }
+            }
+            native_set_parms(MM_CAMERA_PARM_HFR, sizeof(int32_t), (void *)&mHFRLevel);
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid HFR value: %s", (str == NULL) ? "NULL" : str);
+    return NO_ERROR;
+}
+
+// Enable/disable lens-shading (rolloff) correction from KEY_LENSSHADE.
+// No-op when MM_CAMERA_PARM_ROLLOFF is unsupported; BAD_VALUE for an
+// unknown string.
+status_t QCameraHardwareInterface::setLensshadeValue(const CameraParameters& params)
+{
+
+ int rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ROLLOFF);
+ if(!rc) {
+ LOGE("%s:LENS SHADING not supported", __func__);
+ return NO_ERROR;
+ }
+
+ const char *str = params.get(CameraParameters::KEY_LENSSHADE);
+ if (str != NULL) {
+ int value = attr_lookup(lensshade,
+ sizeof(lensshade) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ int8_t temp = (int8_t)value;
+ mParameters.set(CameraParameters::KEY_LENSSHADE, str);
+ native_set_parms(MM_CAMERA_PARM_ROLLOFF, sizeof(int8_t), (void *)&temp);
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid lensShade value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Enable/disable face detection from the app parameter string.
+// Mirrors setFaceDetection(): the looked-up value is validated before it is
+// cached in mFaceDetectOn and pushed to the backend.
+status_t QCameraHardwareInterface::setFaceDetect(const CameraParameters& params)
+{
+    const char *str = params.get(CameraParameters::KEY_FACE_DETECTION);
+    LOGE("setFaceDetect: %s", str);
+    if (str != NULL) {
+        int value = attr_lookup(facedetection,
+                sizeof(facedetection) / sizeof(str_map), str);
+        // BUGFIX: the old code used the lookup result unchecked, so an
+        // unknown string stored the not-found sentinel into mFaceDetectOn
+        // and forwarded it to the driver.
+        if (value != NOT_FOUND) {
+            mFaceDetectOn = value;
+            LOGE("%s Face detection value = %d",__func__, value);
+            cam_ctrl_dimension_t dim;
+//            cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+//            preview_parm_config (&dim, mParameters);
+//            cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+            native_set_parms(MM_CAMERA_PARM_FD, sizeof(int8_t), (void *)&value);
+            mParameters.set(CameraParameters::KEY_FACE_DETECTION, str);
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+// Runtime (string-based) entry to toggle face detection. Validates the
+// string, updates mFaceDetectOn under mMetaDataWaitLock, stores the
+// parameter and forwards the value to the backend.
+status_t QCameraHardwareInterface::setFaceDetection(const char *str)
+{
+    if(supportsFaceDetection() == false){
+        LOGE("Face detection is not enabled");
+        return NO_ERROR;
+    }
+    if (str != NULL) {
+        int value = attr_lookup(facedetection,
+                sizeof(facedetection) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mMetaDataWaitLock.lock();
+            mFaceDetectOn = value;
+            mMetaDataWaitLock.unlock();
+            // FIX: the parameter was stored twice back-to-back; the
+            // redundant duplicate mParameters.set() call was removed.
+            mParameters.set(CameraParameters::KEY_FACE_DETECTION, str);
+            native_set_parms(MM_CAMERA_PARM_FD, sizeof(int8_t), (void *)&value);
+            return NO_ERROR;
+        }
+    }
+    LOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+// Configure auto-exposure bracketing / HDR from KEY_AE_BRACKET_HDR.
+// Three modes: HDR_MODE (3-frame HDR), EXP_BRACKETING_MODE (frame count and
+// exposure list from "capture-burst-exposures"), and HDR_BRACKETING_OFF.
+// HDR is force-disabled when unsupported by the sensor or when ZSL is on.
+status_t QCameraHardwareInterface::setAEBracket(const CameraParameters& params)
+{
+ if(!cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_HDR) || (myMode & CAMERA_ZSL_MODE)) {
+ LOGI("Parameter HDR is not supported for this sensor/ ZSL mode");
+
+ if (myMode & CAMERA_ZSL_MODE) {
+ LOGE("In ZSL mode, reset AEBBracket to HDR_OFF mode");
+ exp_bracketing_t temp;
+ memset(&temp, 0, sizeof(temp));
+ mHdrMode = HDR_BRACKETING_OFF;
+ temp.hdr_enable= FALSE;
+ temp.mode = HDR_BRACKETING_OFF;
+ native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+ }
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_AE_BRACKET_HDR);
+
+ if (str != NULL) {
+ int value = attr_lookup(hdr_bracket,
+ sizeof(hdr_bracket) / sizeof(str_map), str);
+ exp_bracketing_t temp;
+ memset(&temp, 0, sizeof(temp));
+ switch (value) {
+ case HDR_MODE:
+ {
+ mHdrMode = HDR_MODE;
+ temp.hdr_enable= TRUE;
+ temp.mode = HDR_MODE;
+ temp.total_frames = 3;
+ temp.total_hal_frames = getNumOfSnapshots();
+ LOGI("%s: setting HDR frames (%d)", __FUNCTION__, temp.total_hal_frames);
+ native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+ }
+ break;
+ case EXP_BRACKETING_MODE:
+ {
+ int numFrames = getNumOfSnapshots();
+ const char *str_val = params.get("capture-burst-exposures");
+ if ((str_val != NULL) && (strlen(str_val)>0)) {
+ LOGI("%s: capture-burst-exposures %s", __FUNCTION__, str_val);
+
+ mHdrMode = EXP_BRACKETING_MODE;
+ temp.hdr_enable = FALSE;
+ temp.mode = EXP_BRACKETING_MODE;
+ // Frame count is capped so two snapshot buffers stay free.
+ temp.total_frames = (numFrames > MAX_SNAPSHOT_BUFFERS -2) ? MAX_SNAPSHOT_BUFFERS -2 : numFrames;
+ temp.total_hal_frames = temp.total_frames;
+ strlcpy(temp.values, str_val, MAX_EXP_BRACKETING_LENGTH);
+ LOGI("%s: setting Exposure Bracketing value of %s, frame (%d)", __FUNCTION__, temp.values, temp.total_hal_frames);
+ native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+ }
+ else {
+ /* Apps not set capture-burst-exposures, error case fall into bracketing off mode */
+ LOGI("%s: capture-burst-exposures not set, back to HDR OFF mode", __FUNCTION__);
+ mHdrMode = HDR_BRACKETING_OFF;
+ temp.hdr_enable= FALSE;
+ temp.mode = HDR_BRACKETING_OFF;
+ native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+ }
+ }
+ break;
+ case HDR_BRACKETING_OFF:
+ default:
+ {
+ // Unknown strings (NOT_FOUND) also land here and disable bracketing.
+ mHdrMode = HDR_BRACKETING_OFF;
+ temp.hdr_enable= FALSE;
+ temp.mode = HDR_BRACKETING_OFF;
+ native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+ }
+ break;
+ }
+
+ /* save the value*/
+ mParameters.set(CameraParameters::KEY_AE_BRACKET_HDR, str);
+ }
+ return NO_ERROR;
+}
+
+// Seed the "capture-burst-exposures" parameter from the
+// persist.capture.burst.exposures system property, if set.
+status_t QCameraHardwareInterface::setCaptureBurstExp()
+{
+    char burst_exp[PROPERTY_VALUE_MAX];
+    memset(burst_exp, 0, sizeof(burst_exp));
+    property_get("persist.capture.burst.exposures", burst_exp, "");
+    // BUGFIX: burst_exp is a stack array, so the old "NULL != burst_exp"
+    // test was always true; the intended check is for a non-empty value.
+    if ('\0' != burst_exp[0])
+        mParameters.set("capture-burst-exposures", burst_exp);
+    return NO_ERROR;
+}
+
+// Apply red-eye reduction on/off from KEY_REDEYE_REDUCTION. No-op when
+// unsupported by the sensor; BAD_VALUE for an unknown string.
+status_t QCameraHardwareInterface::setRedeyeReduction(const CameraParameters& params)
+{
+ if(supportsRedEyeReduction() == false) {
+ LOGE("Parameter Redeye Reduction is not supported for this sensor");
+ return NO_ERROR;
+ }
+
+ const char *str = params.get(CameraParameters::KEY_REDEYE_REDUCTION);
+ if (str != NULL) {
+ int value = attr_lookup(redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ int8_t temp = (int8_t)value;
+ LOGI("%s: setting Redeye Reduction value of %s", __FUNCTION__, str);
+ mParameters.set(CameraParameters::KEY_REDEYE_REDUCTION, str);
+
+ native_set_parms(MM_CAMERA_PARM_REDEYE_REDUCTION, sizeof(int8_t), (void *)&temp);
+ return NO_ERROR;
+ }
+ }
+ LOGE("Invalid Redeye Reduction value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Mirror GPS/EXIF metadata keys from the app parameters into mParameters.
+// For most keys an absent value removes any previously stored one, so stale
+// location data never leaks into the next capture; KEY_GPS_STATUS is
+// copy-only (never removed).
+status_t QCameraHardwareInterface::setGpsLocation(const CameraParameters& params)
+{
+ const char *method = params.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ if (method) {
+ mParameters.set(CameraParameters::KEY_GPS_PROCESSING_METHOD, method);
+ }else {
+ mParameters.remove(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+ }
+
+ const char *latitude = params.get(CameraParameters::KEY_GPS_LATITUDE);
+ if (latitude) {
+ LOGE("latitude %s",latitude);
+ mParameters.set(CameraParameters::KEY_GPS_LATITUDE, latitude);
+ }else {
+ mParameters.remove(CameraParameters::KEY_GPS_LATITUDE);
+ }
+
+ const char *latitudeRef = params.get(CameraParameters::KEY_GPS_LATITUDE_REF);
+ if (latitudeRef) {
+ mParameters.set(CameraParameters::KEY_GPS_LATITUDE_REF, latitudeRef);
+ }else {
+ mParameters.remove(CameraParameters::KEY_GPS_LATITUDE_REF);
+ }
+
+ const char *longitude = params.get(CameraParameters::KEY_GPS_LONGITUDE);
+ if (longitude) {
+ mParameters.set(CameraParameters::KEY_GPS_LONGITUDE, longitude);
+ }else {
+ mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE);
+ }
+
+ const char *longitudeRef = params.get(CameraParameters::KEY_GPS_LONGITUDE_REF);
+ if (longitudeRef) {
+ mParameters.set(CameraParameters::KEY_GPS_LONGITUDE_REF, longitudeRef);
+ }else {
+ mParameters.remove(CameraParameters::KEY_GPS_LONGITUDE_REF);
+ }
+
+ const char *altitudeRef = params.get(CameraParameters::KEY_GPS_ALTITUDE_REF);
+ if (altitudeRef) {
+ mParameters.set(CameraParameters::KEY_GPS_ALTITUDE_REF, altitudeRef);
+ }else {
+ mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE_REF);
+ }
+
+ const char *altitude = params.get(CameraParameters::KEY_GPS_ALTITUDE);
+ if (altitude) {
+ mParameters.set(CameraParameters::KEY_GPS_ALTITUDE, altitude);
+ }else {
+ mParameters.remove(CameraParameters::KEY_GPS_ALTITUDE);
+ }
+
+ const char *status = params.get(CameraParameters::KEY_GPS_STATUS);
+ if (status) {
+ mParameters.set(CameraParameters::KEY_GPS_STATUS, status);
+ }
+
+ const char *dateTime = params.get(CameraParameters::KEY_EXIF_DATETIME);
+ if (dateTime) {
+ mParameters.set(CameraParameters::KEY_EXIF_DATETIME, dateTime);
+ }else {
+ mParameters.remove(CameraParameters::KEY_EXIF_DATETIME);
+ }
+
+ const char *timestamp = params.get(CameraParameters::KEY_GPS_TIMESTAMP);
+ if (timestamp) {
+ mParameters.set(CameraParameters::KEY_GPS_TIMESTAMP, timestamp);
+ }else {
+ mParameters.remove(CameraParameters::KEY_GPS_TIMESTAMP);
+ }
+ LOGE("setGpsLocation X");
+ return NO_ERROR;
+}
+
+// Store the JPEG rotation from KEY_ROTATION; only 0/90/180/270 are valid.
+// The cached mRotation is later consumed by setJpegRotation().
+status_t QCameraHardwareInterface::setRotation(const CameraParameters& params)
+{
+ status_t rc = NO_ERROR;
+ int rotation = params.getInt(CameraParameters::KEY_ROTATION);
+ if (rotation != NOT_FOUND) {
+ if (rotation == 0 || rotation == 90 || rotation == 180
+ || rotation == 270) {
+ mParameters.set(CameraParameters::KEY_ROTATION, rotation);
+ mRotation = rotation;
+ } else {
+ LOGE("Invalid rotation value: %d", rotation);
+ rc = BAD_VALUE;
+ }
+ }
+ LOGE("setRotation");
+ return rc;
+}
+
+// Legacy denoise setter. The whole body is compiled out (#if 0) and the
+// function unconditionally returns BAD_VALUE; the active implementation is
+// setWaveletDenoise().
+// NOTE(review): confirm callers ignore the return value or that this
+// function is unreferenced before relying on it.
+status_t QCameraHardwareInterface::setDenoise(const CameraParameters& params)
+{
+#if 0
+ if(!mCfgControl.mm_camera_is_supported(MM_CAMERA_PARM_WAVELET_DENOISE)) {
+ LOGE("Wavelet Denoise is not supported for this sensor");
+ return NO_ERROR;
+ }
+ const char *str = params.get(CameraParameters::KEY_DENOISE);
+ if (str != NULL) {
+ int value = attr_lookup(denoise,
+ sizeof(denoise) / sizeof(str_map), str);
+ if ((value != NOT_FOUND) && (mDenoiseValue != value)) {
+ mDenoiseValue = value;
+ mParameters.set(CameraParameters::KEY_DENOISE, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_WAVELET_DENOISE, sizeof(value),
+ (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+ }
+ LOGE("Invalid Denoise value: %s", (str == NULL) ? "NULL" : str);
+#endif
+ return BAD_VALUE;
+}
+
+// Store the "orientation" hint ("portrait"/"landscape") used by the camera
+// service to decide whether preview frames and raw pictures need rotating.
+status_t QCameraHardwareInterface::setOrientation(const CameraParameters& params)
+{
+ const char *str = params.get("orientation");
+
+ if (str != NULL) {
+ if (strcmp(str, "portrait") == 0 || strcmp(str, "landscape") == 0) {
+ // Camera service needs this to decide if the preview frames and raw
+ // pictures should be rotated.
+ mParameters.set("orientation", str);
+ } else {
+ LOGE("Invalid orientation value: %s", str);
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+// Validate the requested picture format against the picture_formats table
+// and store it; unknown strings are rejected with BAD_VALUE.
+status_t QCameraHardwareInterface::setPictureFormat(const CameraParameters& params)
+{
+ const char * str = params.get(CameraParameters::KEY_PICTURE_FORMAT);
+
+ if(str != NULL){
+ int32_t value = attr_lookup(picture_formats,
+ sizeof(picture_formats) / sizeof(str_map), str);
+ if(value != NOT_FOUND){
+ mParameters.set(CameraParameters::KEY_PICTURE_FORMAT, str);
+ } else {
+ LOGE("Invalid Picture Format value: %s", str);
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+// Push the recording hint to the backend; when recording is indicated
+// (TRUE) also enable continuous autofocus (CAF). DIS and full-size
+// liveshot are refreshed afterwards since both depend on this hint.
+status_t QCameraHardwareInterface::setRecordingHintValue(const int32_t value)
+{
+ native_set_parms(MM_CAMERA_PARM_RECORDING_HINT, sizeof(value),
+ (void *)&value);
+ if (value == TRUE){
+ native_set_parms(MM_CAMERA_PARM_CAF_ENABLE, sizeof(value),
+ (void *)&value);
+ }
+ setDISMode();
+ setFullLiveshot();
+ return NO_ERROR;
+}
+
+// Apply the app's KEY_RECORDING_HINT. On any path (valid, invalid or
+// absent value) DIS and full-size liveshot state are refreshed, since both
+// depend on the camcorder/camera mode.
+status_t QCameraHardwareInterface::setRecordingHint(const CameraParameters& params)
+{
+
+    const char * str = params.get(CameraParameters::KEY_RECORDING_HINT);
+
+    if(str != NULL){
+        int32_t value = attr_lookup(recording_Hints,
+                sizeof(recording_Hints) / sizeof(str_map), str);
+        if(value != NOT_FOUND){
+            mRecordingHint = value;
+            setRecordingHintValue(mRecordingHint);
+            mParameters.set(CameraParameters::KEY_RECORDING_HINT, str);
+            return NO_ERROR;
+        } else {
+            // BUGFIX: the error text said "Invalid Picture Format value"
+            // (copy/paste from setPictureFormat); corrected to name this
+            // parameter.
+            LOGE("Invalid Recording Hint value: %s", str);
+            setDISMode();
+            setFullLiveshot();
+            return BAD_VALUE;
+        }
+    }
+    setDISMode();
+    setFullLiveshot();
+    return NO_ERROR;
+}
+
+// Recompute and push the digital image stabilization (DIS) enable flag.
+status_t QCameraHardwareInterface::setDISMode() {
+ /* Enable DIS only if
+ * - Camcorder mode AND
+ * - DIS property is set AND
+ * - Not in Low power mode. */
+ uint32_t value = mRecordingHint && mDisEnabled
+ && !isLowPowerCamcorder();
+
+ LOGI("%s DIS is %s value = %d", __func__,
+ value ? "Enabled" : "Disabled", value);
+ native_set_parms(MM_CAMERA_PARM_DIS_ENABLE, sizeof(value),
+ (void *)&value);
+ return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setFullLiveshot()
+{
+    /* Full-size liveshot requires camcorder mode with the feature enabled
+     * and not running in low-power camcorder mode. */
+    uint32_t enable = (mRecordingHint && mFullLiveshotEnabled
+                       && !isLowPowerCamcorder()) ? 1 : 0;
+
+    /* When the video size equals the liveshot size there is no benefit:
+     * turn full-size liveshot off to keep the frame rate up. */
+    if ((mDimension.picture_width == mDimension.video_width) &&
+        (mDimension.picture_height == mDimension.video_height)) {
+        enable = 0;
+    }
+
+    LOGI("%s Full size liveshot %s value = %d", __func__,
+         enable ? "Enabled" : "Disabled", enable);
+    native_set_parms(MM_CAMERA_PARM_FULL_LIVESHOT, sizeof(enable),
+                     (void *)&enable);
+    return NO_ERROR;
+}
+
+
+isp3a_af_mode_t QCameraHardwareInterface::getAutoFocusMode(
+  const CameraParameters& params)
+{
+    // Return the cached focus mode. 'params' is unused but retained for
+    // interface compatibility with callers. (Removed the original dead
+    // initialization to AF_MODE_MAX that was immediately overwritten.)
+    return (isp3a_af_mode_t)mFocusMode;
+}
+
+// Thin accessor: forwards to the CameraParameters copy held by the HAL.
+void QCameraHardwareInterface::getPictureSize(int *picture_width,
+                                              int *picture_height) const
+{
+    mParameters.getPictureSize(picture_width, picture_height);
+}
+
+// Thin accessor: forwards to the CameraParameters copy held by the HAL.
+void QCameraHardwareInterface::getPreviewSize(int *preview_width,
+                                              int *preview_height) const
+{
+    mParameters.getPreviewSize(preview_width, preview_height);
+}
+
+cam_format_t QCameraHardwareInterface::getPreviewFormat() const
+{
+    // Map the HAL preview-format string onto the mm-camera format enum.
+    // Defaults to CAMERA_YUV_420_NV21 when no format is set or the string
+    // is not recognized.
+    cam_format_t format = CAMERA_YUV_420_NV21;  /* fixed 'foramt' typo */
+    const char *str = mParameters.getPreviewFormat();
+    if (str == NULL) {
+        /* BUGFIX: guard against a NULL format string before attr_lookup. */
+        return format;
+    }
+
+    int32_t value = attr_lookup(preview_formats,
+                                sizeof(preview_formats)/sizeof(str_map),
+                                str);
+    if (value != NOT_FOUND) {
+        int num = sizeof(preview_format_info_list)/sizeof(preview_format_info_t);
+        for (int i = 0; i < num; i++) {
+            if (preview_format_info_list[i].Hal_format == value) {
+                format = preview_format_info_list[i].mm_cam_format;
+                break;
+            }
+        }
+    }
+
+    return format;
+}
+
+// Padding requirement of the currently selected preview format.
+cam_pad_format_t QCameraHardwareInterface::getPreviewPadding() const
+{
+    return mPreviewFormatInfo.padding;
+}
+
+// Cached JPEG encode quality value (set elsewhere via parameters).
+int QCameraHardwareInterface::getJpegQuality() const
+{
+    return mJpegQuality;
+}
+
+int QCameraHardwareInterface::getNumOfSnapshots(void) const
+{
+    // Number of snapshots per shutter press. A non-zero value of the
+    // persist.camera.snapshot.number property overrides the
+    // "num-snaps-per-shutter" parameter.
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.snapshot.number", prop, "0");
+    int num = atoi(prop);  /* parse once instead of three times */
+    LOGI("%s: prop enable/disable = %d", __func__, num);
+    if (num) {
+        /* was LOGE in the original; this is informational, not an error
+         * (matches the LOGI used by the overload below) */
+        LOGI("%s: Reading maximum no of snapshots = %d"
+             "from properties", __func__, num);
+        return num;
+    }
+    return mParameters.getInt("num-snaps-per-shutter");
+}
+
+int QCameraHardwareInterface::getNumOfSnapshots(const CameraParameters& params)
+{
+    // Same policy as the parameterless overload, but reads the fallback
+    // count from the caller-supplied parameter set.
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.snapshot.number", prop, "0");
+    LOGI("%s: prop enable/disable = %d", __func__, atoi(prop));
+    if (atoi(prop) == 0) {
+        return params.getInt("num-snaps-per-shutter");
+    }
+    LOGI("%s: Reading maximum no of snapshots = %d"
+         "from properties", __func__, atoi(prop));
+    return atoi(prop);
+}
+
+int QCameraHardwareInterface::
+getThumbSizesFromAspectRatio(uint32_t aspect_ratio,
+                             int *picture_width,
+                             int *picture_height)
+{
+    // Linear scan of the thumbnail table for a matching aspect ratio.
+    unsigned int idx = 0;
+    while (idx < THUMBNAIL_SIZE_COUNT) {
+        if (thumbnail_sizes[idx].aspect_ratio == aspect_ratio) {
+            *picture_width  = thumbnail_sizes[idx].width;
+            *picture_height = thumbnail_sizes[idx].height;
+            return NO_ERROR;
+        }
+        idx++;
+    }
+    // No table entry matches the requested ratio.
+    return BAD_VALUE;
+}
+
+bool QCameraHardwareInterface::isRawSnapshot()
+{
+    // True when the currently configured picture format is RAW.
+    const char *fmt = mParameters.getPictureFormat();
+    return (fmt != NULL) &&
+           (strcmp(fmt, CameraParameters::PIXEL_FORMAT_RAW) == 0);
+}
+
+status_t QCameraHardwareInterface::setPreviewSizeTable(void)
+{
+    // Build the preview size table: walk the default table until the first
+    // entry that fits within the sensor's maximum preview size, then expose
+    // that entry and everything after it.
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* preview_size_table;
+    int preview_table_size;
+    int i = 0;
+    char str[10] = {0};
+
+    /* Initialize table with default values */
+    preview_size_table = default_preview_sizes;
+    preview_table_size = preview_sizes_count;
+
+    /* Get maximum preview size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_PREVIEW_SIZE, &dim);
+    if (ret != NO_ERROR) {
+        LOGE("%s: Failure getting Max Preview Size supported by camera",
+             __func__);
+        goto end;
+    }
+
+    LOGD("%s: Max Preview Sizes Supported: %d X %d", __func__,
+         dim.width, dim.height);
+
+    for (i = 0; i < preview_table_size; i++) {
+        if ((preview_size_table->width <= dim.width) &&
+            (preview_size_table->height <= dim.height)) {
+            LOGD("%s: Camera Preview Size Table "
+                 "Max width: %d height %d table_size: %d",
+                 __func__, preview_size_table->width,
+                 preview_size_table->height, preview_table_size - i);
+            break;
+        }
+        preview_size_table++;
+    }
+
+    /* BUGFIX: if no table entry fits, the original dereferenced one element
+     * past the end of the table below. Fall back to the full default table
+     * and report the failure instead. */
+    if (i >= preview_table_size) {
+        LOGE("%s: No preview size fits within %d X %d", __func__,
+             dim.width, dim.height);
+        preview_size_table = default_preview_sizes;
+        i = 0;
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    //set preferred preview size to maximum preview size
+    /* snprintf (was sprintf): guards the fixed 10-byte buffer */
+    snprintf(str, sizeof(str), "%dx%d",
+             preview_size_table->width, preview_size_table->height);
+    mParameters.set(CameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, str);
+    LOGD("KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO = %s", str);
+
+end:
+    /* Save the table in global member*/
+    mPreviewSizes = preview_size_table;
+    mPreviewSizeCount = preview_table_size - i;
+
+    return ret;
+}
+
+status_t QCameraHardwareInterface::setPictureSizeTable(void)
+{
+    // Build mPictureSizes: the subset of default_picture_sizes whose width
+    // AND height fit within the sensor's maximum picture size. On any
+    // failure the full default table is used as a fallback.
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* picture_size_table;
+    int picture_table_size;
+    int i = 0, count = 0;
+
+    /* Initialize table with default values */
+    picture_table_size = sizeof(default_picture_sizes)/
+        sizeof(default_picture_sizes[0]);
+    picture_size_table = default_picture_sizes;
+    mPictureSizes =
+        ( struct camera_size_type *)malloc(picture_table_size *
+                                           sizeof(struct camera_size_type));
+    if (mPictureSizes == NULL) {
+        LOGE("%s: Failure allocating memory to store picture size table",__func__);
+        /* BUGFIX: the original left ret == NO_ERROR here, so the fallback
+         * at 'end' was skipped and mPictureSizes stayed NULL. */
+        ret = NO_MEMORY;
+        goto end;
+    }
+
+    /* Get maximum picture size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_PICTURE_SIZE, &dim);
+    if (ret != NO_ERROR) {
+        LOGE("%s: Failure getting Max Picture Size supported by camera",
+             __func__);
+        ret = NO_MEMORY;
+        free(mPictureSizes);
+        mPictureSizes = NULL;
+        goto end;
+    }
+
+    LOGD("%s: Max Picture Sizes Supported: %d X %d", __func__,
+         dim.width, dim.height);
+
+    for (i = 0; i < picture_table_size; i++) {
+        /* We'll store those dimensions whose width AND height
+           are less than or equal to maximum supported */
+        if ((picture_size_table->width <= dim.width) &&
+            (picture_size_table->height <= dim.height)) {
+            LOGD("%s: Camera Picture Size Table "
+                 "Max width: %d height %d table_size: %d",
+                 __func__, picture_size_table->width,
+                 picture_size_table->height, count+1);
+            mPictureSizes[count].height = picture_size_table->height;
+            mPictureSizes[count].width = picture_size_table->width;
+            count++;
+        }
+        picture_size_table++;
+    }
+    mPictureSizeCount = count;
+
+end:
+    /* In case of error, we use default picture sizes */
+    if (ret != NO_ERROR) {
+        mPictureSizes = default_picture_sizes;
+        mPictureSizeCount = picture_table_size;
+    }
+    return ret;
+}
+
+status_t QCameraHardwareInterface::setVideoSizeTable(void)
+{
+    // Build mVideoSizes: entries of default_video_sizes whose width AND
+    // height fit within the sensor's maximum video size.
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* video_size_table;
+    int video_table_size;
+    int i = 0, count = 0;
+    LOGE("%s: E", __func__);
+
+    /* Initialize table with default values */
+    video_table_size = video_sizes_count;
+    video_size_table = default_video_sizes;
+    mVideoSizes =
+        (struct camera_size_type *)malloc(video_table_size *
+                                          sizeof(struct camera_size_type));
+    if(mVideoSizes == NULL) {
+        LOGE("%s: error allocating memory to store video size table",__func__);
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    /* Get maximum video size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_VIDEO_SIZE, &dim);
+    if(ret != NO_ERROR) {
+        LOGE("%s: error getting Max Video Size supported by camera",
+             __func__);
+        free(mVideoSizes);
+        mVideoSizes = NULL;
+        /* BUGFIX: the original assigned ret = NO_MEMORY and then
+         * immediately overwrote it with BAD_VALUE; the dead store is
+         * removed and BAD_VALUE kept for caller compatibility. */
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    LOGD("%s: Max Video Size Supported: %d X %d", __func__,
+         dim.width, dim.height);
+
+    for(i=0; i < video_table_size; i++) {
+        /* We'll store those dimensions whose width AND height
+           are less than or equal to maximum supported */
+        if((video_size_table->width <= dim.width) &&
+           (video_size_table->height <= dim.height)) {
+            LOGD("%s: Supported Video Size [%d] = %dx%d", __func__, count, video_size_table->width,
+                 video_size_table->height);
+            mVideoSizes[count].height = video_size_table->height;
+            mVideoSizes[count].width = video_size_table->width;
+            count++;
+        }
+        video_size_table++;
+    }
+    mVideoSizeCount = count;
+
+end:
+    LOGE("%s: X", __func__);
+    return ret;
+}
+
+void QCameraHardwareInterface::freeVideoSizeTable(void)
+{
+    // Release the table allocated by setVideoSizeTable().
+    if (mVideoSizes != NULL) {
+        free(mVideoSizes);
+        /* BUGFIX: reset the pointer so a second call (or a stale read)
+         * cannot double-free or use freed memory. */
+        mVideoSizes = NULL;
+    }
+    mVideoSizeCount = 0;
+}
+
+
+void QCameraHardwareInterface::freePictureTable(void)
+{
+    /* If we couldn't allocate memory to store picture table
+       we use the picture table pointer to point to default
+       picture table array. In that case we cannot free it.*/
+    if ((mPictureSizes != default_picture_sizes) && mPictureSizes) {
+        free(mPictureSizes);
+        /* BUGFIX: clear the pointer to guard against double-free. */
+        mPictureSizes = NULL;
+    }
+}
+
+status_t QCameraHardwareInterface::setHistogram(int histogram_en)
+{
+    // Enable/disable preview histogram statistics. Enabling allocates three
+    // client-side stats buffers plus three server-side buffers (ION or pmem)
+    // and registers the latter with the back end; disabling releases and
+    // unregisters all of them. No-op when the state is unchanged.
+    LOGE("setHistogram: E");
+    if(mStatsOn == histogram_en) {
+        return NO_ERROR;
+    }
+
+    mSendData = histogram_en;
+    mStatsOn = histogram_en;
+    mCurrentHisto = -1;
+    mStatSize = sizeof(uint32_t)* HISTOGRAM_STATS_SIZE;
+
+    if (histogram_en == QCAMERA_PARM_ENABLE) {
+        /*Currently the Ashmem is multiplying the buffer size with total number
+        of buffers and page aligning. This causes a crash in JNI as each buffer
+        individually expected to be page aligned */
+        int page_size_minus_1 = getpagesize() - 1;
+        int statSize = sizeof (camera_preview_histogram_info );
+        int32_t mAlignedStatSize = ((statSize + page_size_minus_1) & (~page_size_minus_1));
+#if 0
+        mStatHeap =
+            new AshmemPool(mAlignedStatSize, 3, statSize, "stat");
+        if (!mStatHeap->initialized()) {
+            LOGE("Stat Heap X failed ");
+            mStatHeap.clear();
+            mStatHeap = NULL;
+            return UNKNOWN_ERROR;
+        }
+#endif
+        for(int cnt = 0; cnt<3; cnt++) {
+            // Client-side buffer handed to the framework callback.
+            mStatsMapped[cnt]=mGetMemory(-1, mStatSize, 1, mCallbackCookie);
+            if(mStatsMapped[cnt] == NULL) {
+                // NOTE(review): buffers allocated in earlier iterations are
+                // leaked on this early return — confirm intended.
+                LOGE("Failed to get camera memory for stats heap index: %d", cnt);
+                return(-1);
+            } else {
+                LOGE("Received following info for stats mapped data:%p,handle:%p, size:%d,release:%p",
+                     mStatsMapped[cnt]->data ,mStatsMapped[cnt]->handle, mStatsMapped[cnt]->size, mStatsMapped[cnt]->release);
+            }
+            mHistServer.size = sizeof(camera_preview_histogram_info);
+#ifdef USE_ION
+            if(allocate_ion_memory(&mHistServer, cnt, ION_CP_MM_HEAP_ID) < 0) {
+                LOGE("%s ION alloc failed\n", __func__);
+                return -1;
+            }
+#else
+            mHistServer.fd[cnt] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+            if(mHistServer.fd[cnt] <= 0) {
+                LOGE("%s: no pmem for frame %d", __func__, cnt);
+                return -1;
+            }
+#endif
+            // Server-side buffer shared with the daemon through its fd.
+            mHistServer.camera_memory[cnt]=mGetMemory(mHistServer.fd[cnt],mHistServer.size, 1, mCallbackCookie);
+            if(mHistServer.camera_memory[cnt] == NULL) {
+                LOGE("Failed to get camera memory for server side histogram index: %d", cnt);
+                return(-1);
+            } else {
+                LOGE("Received following info for server side histogram data:%p,handle:%p, size:%d,release:%p",
+                     mHistServer.camera_memory[cnt]->data ,mHistServer.camera_memory[cnt]->handle,
+                     mHistServer.camera_memory[cnt]->size, mHistServer.camera_memory[cnt]->release);
+            }
+            /*Register buffer at back-end*/
+            if (NO_ERROR != sendMappingBuf(0, cnt, mHistServer.fd[cnt],
+                                           mHistServer.size, mCameraId,
+                                           CAM_SOCK_MSG_TYPE_HIST_MAPPING)) {
+                LOGE("%s could not send buffer to back-end\n", __func__);
+            }
+        }
+    }
+    LOGV("Setting histogram = %d", histogram_en);
+    native_set_parms(MM_CAMERA_PARM_HISTOGRAM, sizeof(int), &histogram_en);
+    if(histogram_en == QCAMERA_PARM_DISABLE)
+    {
+        //release memory
+        for(int i=0; i<3; i++){
+            if(mStatsMapped[i] != NULL) {
+                mStatsMapped[i]->release(mStatsMapped[i]);
+            }
+            /*Unregister buffer at back-end */
+            if (NO_ERROR != sendUnMappingBuf(0, i, mCameraId, CAM_SOCK_MSG_TYPE_HIST_UNMAPPING)) {
+                LOGE("%s could not unregister buffer from back-end\n", __func__);
+            }
+            if(mHistServer.camera_memory[i] != NULL) {
+                mHistServer.camera_memory[i]->release(mHistServer.camera_memory[i]);
+            }
+            close(mHistServer.fd[i]);
+#ifdef USE_ION
+            deallocate_ion_memory(&mHistServer, i);
+#endif
+        }
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setZSLBurstLookBack(const CameraParameters& params)
+{
+    // Copy the retroactive burst count into our parameter store, if given.
+    const char *v = params.get("capture-burst-retroactive");
+    if (v == NULL) {
+        return NO_ERROR;
+    }
+    int look_back = atoi(v);
+    LOGI("%s: look_back =%d", __func__, look_back);
+    mParameters.set("capture-burst-retroactive", look_back);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setZSLBurstInterval(const CameraParameters& params)
+{
+    // Reset to the default, then accept a caller-supplied interval only if
+    // it lies within [BURST_INTREVAL_MIN, BURST_INTREVAL_MAX].
+    mZslInterval = BURST_INTREVAL_DEFAULT;
+    const char *v = params.get("capture-burst-interval");
+    if (v == NULL) {
+        return NO_ERROR;
+    }
+    int interval = atoi(v);
+    LOGI("%s: Interval =%d", __func__, interval);
+    if (interval < BURST_INTREVAL_MIN || interval > BURST_INTREVAL_MAX) {
+        return BAD_VALUE;
+    }
+    mZslInterval = interval;
+    return NO_ERROR;
+}
+
+int QCameraHardwareInterface::getZSLBurstInterval( void )
+{
+    // An explicitly-set interval wins; otherwise fall back to the
+    // persist.camera.zsl.interval property (default "1").
+    if (mZslInterval != BURST_INTREVAL_DEFAULT) {
+        return mZslInterval;
+    }
+
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.zsl.interval", prop, "1");
+    int val = atoi(prop);
+    LOGD("%s: prop interval = %d", __func__, val);
+    return val;
+}
+
+
+int QCameraHardwareInterface::getZSLQueueDepth(void) const
+{
+    // ZSL queue depth comes straight from a system property (default "2").
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.zsl.queuedepth", prop, "2");
+    int depth = atoi(prop);
+    LOGI("%s: prop = %d", __func__, depth);
+    return depth;
+}
+
+int QCameraHardwareInterface::getZSLBackLookCount(void) const
+{
+    // The persist.camera.zsl.backlookcnt property overrides; when it is 0
+    // (the default) the capture-burst-retroactive parameter is used.
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.zsl.backlookcnt", prop, "0");
+    LOGI("%s: prop = %d", __func__, atoi(prop));
+    int look_back = atoi(prop);
+    if (look_back == 0 ) {
+        look_back = mParameters.getInt("capture-burst-retroactive");
+        LOGE("%s: look_back = %d", __func__, look_back);
+    }
+    return look_back;
+}
+
+//EXIF functions
+void QCameraHardwareInterface::deinitExifData()
+{
+    // Zero every entry of the EXIF tag table and mark the table empty.
+    LOGD("Clearing EXIF data");
+    for (int idx = 0; idx < MAX_EXIF_TABLE_ENTRIES; ++idx) {
+        memset(&mExifData[idx], 0x00, sizeof(exif_tags_info_t));
+    }
+    mExifTableNumEntries = 0;
+}
+
+void QCameraHardwareInterface::addExifTag(exif_tag_id_t tagid, exif_tag_type_t type,
+                                          uint32_t count, uint8_t copy, void *data) {
+    // Append one entry to the EXIF tag table; the tag is silently dropped
+    // when the table is full.
+    if(mExifTableNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        LOGE("%s: Number of entries exceeded limit", __func__);
+        return;
+    }
+    int index = mExifTableNumEntries;
+    mExifData[index].tag_id = tagid;
+    mExifData[index].tag_entry.type = type;
+    mExifData[index].tag_entry.count = count;
+    mExifData[index].tag_entry.copy = copy;
+    /* BUGFIX: the first RATIONAL test was a standalone 'if' detached from
+     * the else-if chain below; the chain is now uniform so exactly one
+     * union member is written per call. */
+    if((type == EXIF_RATIONAL) && (count > 1))
+        mExifData[index].tag_entry.data._rats = (rat_t *)data;
+    else if((type == EXIF_RATIONAL) && (count == 1))
+        mExifData[index].tag_entry.data._rat = *(rat_t *)data;
+    else if(type == EXIF_ASCII)
+        mExifData[index].tag_entry.data._ascii = (char *)data;
+    else if(type == EXIF_BYTE)
+        mExifData[index].tag_entry.data._byte = *(uint8_t *)data;
+    else if((type == EXIF_SHORT) && (count > 1))
+        mExifData[index].tag_entry.data._shorts = (uint16_t *)data;
+    else if((type == EXIF_SHORT) && (count == 1))
+        mExifData[index].tag_entry.data._short = *(uint16_t *)data;
+    // Increase number of entries
+    mExifTableNumEntries++;
+}
+
+rat_t getRational(int num, int denom)
+{
+    // Helper: package a numerator/denominator pair into a rat_t value.
+    const rat_t result = {num, denom};
+    return result;
+}
+
+void QCameraHardwareInterface::initExifData(){
+    // Populate the EXIF tag table from values previously gathered into
+    // mExifValues (see setExifTags()/setExifTagsGPS()). Each mXxx boolean
+    // records whether the corresponding value was supplied.
+    if(mExifValues.dateTime) {
+        addExifTag(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                   20, 1, (void *)mExifValues.dateTime);
+    }
+    addExifTag(EXIFTAGID_FOCAL_LENGTH, EXIF_RATIONAL, 1, 1, (void *)&(mExifValues.focalLength));
+    addExifTag(EXIFTAGID_ISO_SPEED_RATING,EXIF_SHORT,1,1,(void *)&(mExifValues.isoSpeed));
+
+    if(mExifValues.mGpsProcess) {
+        // Length covers the ASCII prefix plus the method text that
+        // setExifTagsGPS() stored after the prefix, plus the terminator.
+        addExifTag(EXIFTAGID_GPS_PROCESSINGMETHOD, EXIF_ASCII,
+                   EXIF_ASCII_PREFIX_SIZE + strlen(mExifValues.gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE) + 1,
+                   1, (void *)mExifValues.gpsProcessingMethod);
+    }
+
+    if(mExifValues.mLatitude) {
+        addExifTag(EXIFTAGID_GPS_LATITUDE, EXIF_RATIONAL, 3, 1, (void *)mExifValues.latitude);
+
+        // NOTE(review): latRef appears to be an array member, so this
+        // truthiness check may always be true — confirm the intent.
+        if(mExifValues.latRef) {
+            addExifTag(EXIFTAGID_GPS_LATITUDE_REF, EXIF_ASCII, 2,
+                       1, (void *)mExifValues.latRef);
+        }
+    }
+
+    if(mExifValues.mLongitude) {
+        addExifTag(EXIFTAGID_GPS_LONGITUDE, EXIF_RATIONAL, 3, 1, (void *)mExifValues.longitude);
+
+        if(mExifValues.lonRef) {
+            addExifTag(EXIFTAGID_GPS_LONGITUDE_REF, EXIF_ASCII, 2,
+                       1, (void *)mExifValues.lonRef);
+        }
+    }
+
+    if(mExifValues.mAltitude) {
+        addExifTag(EXIFTAGID_GPS_ALTITUDE, EXIF_RATIONAL, 1,
+                   1, (void *)&(mExifValues.altitude));
+
+        addExifTag(EXIFTAGID_GPS_ALTITUDE_REF, EXIF_BYTE, 1, 1, (void *)&mExifValues.mAltitude_ref);
+    }
+
+    if(mExifValues.mTimeStamp) {
+        time_t unixTime;
+        struct tm *UTCTimestamp;
+
+        // Convert the GPS epoch timestamp into a UTC date string and
+        // hour/min/sec rationals for the EXIF GPS IFD.
+        unixTime = (time_t)mExifValues.mGPSTimestamp;
+        UTCTimestamp = gmtime(&unixTime);
+
+        strftime(mExifValues.gpsDateStamp, sizeof(mExifValues.gpsDateStamp), "%Y:%m:%d", UTCTimestamp);
+        addExifTag(EXIFTAGID_GPS_DATESTAMP, EXIF_ASCII,
+                   strlen(mExifValues.gpsDateStamp)+1 , 1, (void *)mExifValues.gpsDateStamp);
+
+        mExifValues.gpsTimeStamp[0] = getRational(UTCTimestamp->tm_hour, 1);
+        mExifValues.gpsTimeStamp[1] = getRational(UTCTimestamp->tm_min, 1);
+        mExifValues.gpsTimeStamp[2] = getRational(UTCTimestamp->tm_sec, 1);
+
+        addExifTag(EXIFTAGID_GPS_TIMESTAMP, EXIF_RATIONAL,
+                   3, 1, (void *)mExifValues.gpsTimeStamp);
+        LOGE("EXIFTAGID_GPS_TIMESTAMP set");
+    }
+
+}
+
+//Add all exif tags in this function
+//Add all exif tags in this function
+void QCameraHardwareInterface::setExifTags()
+{
+    // Gather EXIF values (timestamp, focal length, ISO, GPS) from the
+    // current parameters into mExifValues.
+    const char *dateStr = mParameters.get(CameraParameters::KEY_EXIF_DATETIME);
+    if (dateStr != NULL) {
+        // Keep at most 19 characters plus the terminator.
+        strncpy(mExifValues.dateTime, dateStr, 19);
+        mExifValues.dateTime[19] = '\0';
+    }
+
+    // Focal length is stored as a rational scaled by the decimal precision.
+    int focalScaled = (int)(mParameters.getFloat(
+        CameraParameters::KEY_FOCAL_LENGTH) * FOCAL_LENGTH_DECIMAL_PRECISION);
+    mExifValues.focalLength = getRational(focalScaled, FOCAL_LENGTH_DECIMAL_PRECISION);
+
+    //Set ISO Speed
+    mExifValues.isoSpeed = getISOSpeedValue();
+
+    //set gps tags
+    setExifTagsGPS();
+}
+
+void QCameraHardwareInterface::setExifTagsGPS()
+{
+    // Read the GPS-related camera parameters and cache them in mExifValues;
+    // each mXxx boolean records whether the corresponding value was present
+    // so initExifData() knows which tags to emit.
+    const char *str = NULL;
+
+    //Set GPS processing method
+    str = mParameters.get(CameraParameters::KEY_GPS_PROCESSING_METHOD);
+    if(str != NULL) {
+        // Stored as the EXIF ASCII prefix followed by the method text.
+        memcpy(mExifValues.gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+        strncpy(mExifValues.gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str,
+                GPS_PROCESSING_METHOD_SIZE - 1);
+        mExifValues.gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE-1] = '\0';
+        LOGE("EXIFTAGID_GPS_PROCESSINGMETHOD = %s %s", mExifValues.gpsProcessingMethod,
+             mExifValues.gpsProcessingMethod+8);
+        mExifValues.mGpsProcess = true;
+    }else{
+        mExifValues.mGpsProcess = false;
+    }
+    str = NULL;
+
+    //Set Latitude
+    str = mParameters.get(CameraParameters::KEY_GPS_LATITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, mExifValues.latitude);
+        LOGE("EXIFTAGID_GPS_LATITUDE = %s", str);
+
+        //set Latitude Ref
+        float latitudeValue = mParameters.getFloat(CameraParameters::KEY_GPS_LATITUDE);
+        if(latitudeValue < 0.0f) {
+            mExifValues.latRef[0] = 'S';
+        } else {
+            mExifValues.latRef[0] = 'N';
+        }
+        mExifValues.latRef[1] = '\0';
+        mExifValues.mLatitude = true;
+        mParameters.set(CameraParameters::KEY_GPS_LATITUDE_REF,mExifValues.latRef);
+        LOGE("EXIFTAGID_GPS_LATITUDE_REF = %s", mExifValues.latRef);
+    }else{
+        mExifValues.mLatitude = false;
+    }
+
+    //set Longitude
+    str = NULL;
+    str = mParameters.get(CameraParameters::KEY_GPS_LONGITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, mExifValues.longitude);
+        LOGE("EXIFTAGID_GPS_LONGITUDE = %s", str);
+
+        //set Longitude Ref
+        float longitudeValue = mParameters.getFloat(CameraParameters::KEY_GPS_LONGITUDE);
+        if(longitudeValue < 0.0f) {
+            mExifValues.lonRef[0] = 'W';
+        } else {
+            mExifValues.lonRef[0] = 'E';
+        }
+        mExifValues.lonRef[1] = '\0';
+        mExifValues.mLongitude = true;
+        LOGE("EXIFTAGID_GPS_LONGITUDE_REF = %s", mExifValues.lonRef);
+        mParameters.set(CameraParameters::KEY_GPS_LONGITUDE_REF, mExifValues.lonRef);
+    }else{
+        mExifValues.mLongitude = false;
+    }
+
+    //set Altitude
+    str = mParameters.get(CameraParameters::KEY_GPS_ALTITUDE);
+    if(str != NULL) {
+        double value = atof(str);
+        // EXIF altitude ref: 0 = above sea level, 1 = below (value negated).
+        mExifValues.mAltitude_ref = 0;
+        if(value < 0){
+            mExifValues.mAltitude_ref = 1;
+            value = -value;
+        }
+        // Stored as a rational with 1/1000 precision.
+        mExifValues.altitude = getRational(value*1000, 1000);
+        mExifValues.mAltitude = true;
+        //set AltitudeRef
+        mParameters.set(CameraParameters::KEY_GPS_ALTITUDE_REF, mExifValues.mAltitude_ref);
+        LOGE("EXIFTAGID_GPS_ALTITUDE = %f", value);
+    }else{
+        mExifValues.mAltitude = false;
+    }
+
+    //set Gps TimeStamp
+    str = NULL;
+    str = mParameters.get(CameraParameters::KEY_GPS_TIMESTAMP);
+    if(str != NULL) {
+        mExifValues.mTimeStamp = true;
+        mExifValues.mGPSTimestamp = atol(str);
+    }else{
+        mExifValues.mTimeStamp = false;
+    }
+}
+
+//latlonString is string formatted coordinate
+//coord is rat_t[3]
+void QCameraHardwareInterface::parseGPSCoordinate(const char *latlonString, rat_t* coord)
+{
+    // Convert a decimal-degree coordinate string into the EXIF
+    // degrees/minutes/seconds rational triple.
+    if (coord == NULL) {
+        LOGE("%s: error, invalid argument coord == NULL", __func__);
+        return;
+    }
+    const float degrees = fabs(atof(latlonString));
+    const float minutes = (degrees - (int) degrees) * 60;
+    const float seconds = (minutes - (int) minutes) * 60;
+
+    coord[0] = getRational((int) degrees, 1);
+    coord[1] = getRational((int) minutes, 1);
+    // Seconds keep four decimal places of precision.
+    coord[2] = getRational((int) (seconds * 10000), 10000);
+}
+
+bool QCameraHardwareInterface::isLowPowerCamcorder() {
+    // Low-power camcorder when the power mode says so, or when a high
+    // frame-rate level is active.
+    /* mHFRLevel > 1 is a hard-coded threshold; need to move tgtcommon to
+       camear.h (per original note) */
+    return (mPowerMode == LOW_POWER) || (mHFRLevel > 1);
+}
+
+status_t QCameraHardwareInterface::setNoDisplayMode(const CameraParameters& params)
+{
+    // A non-zero persist.camera.nodisplay property overrides the
+    // "no-display-mode" parameter.
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.nodisplay", prop, "0");
+    int prop_val = atoi(prop);
+
+    if (prop_val != 0) {
+        mNoDisplayMode = prop_val;
+        LOGD("prop mNoDisplayMode =%d", mNoDisplayMode);
+        return NO_ERROR;
+    }
+
+    const char *str_val = params.get("no-display-mode");
+    mNoDisplayMode = (str_val && strlen(str_val) > 0) ? atoi(str_val) : 0;
+    LOGD("Param mNoDisplayMode =%d", mNoDisplayMode);
+    return NO_ERROR;
+}
+
+}; /*namespace android */
diff --git a/camera/QCamera/HAL/core/src/QCameraHWI_Preview.cpp b/camera/QCamera/HAL/core/src/QCameraHWI_Preview.cpp
new file mode 100644
index 0000000..30f06a5
--- /dev/null
+++ b/camera/QCamera/HAL/core/src/QCameraHWI_Preview.cpp
@@ -0,0 +1,1434 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define LOG_TAG "QCameraHWI_Preview"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+#include <gralloc_priv.h>
+#include <genlock.h>
+
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraHWI_Preview class implementation goes here*/
+/* following code implement the preview mode's image capture & display logic of this class*/
+
+namespace android {
+
+// ---------------------------------------------------------------------------
+// Preview Callback
+// ---------------------------------------------------------------------------
+static void preview_notify_cb(mm_camera_ch_data_buf_t *frame,
+                              void *user_data)
+{
+    // Channel callback: forward the preview frame to the stream object
+    // carried in user_data. (Removed the original unused 'bufs_used'
+    // local variable.)
+    QCameraStream_preview *pme = (QCameraStream_preview *)user_data;
+    LOGV("%s: E", __func__);
+    /* for peview data, there is no queue, so directly use*/
+    if(pme==NULL) {
+        LOGE("%s: X : Incorrect cookie",__func__);
+        /*Call buf done*/
+        return;
+    }
+
+    pme->processPreviewFrame(frame);
+    LOGV("%s: X", __func__);
+}
+
+status_t QCameraStream_preview::setPreviewWindow(preview_stream_ops_t* window)
+{
+    // Store the new preview window under the stop-callback lock. A NULL
+    // window invalidates the current one (buffer release is still a TODO).
+    LOGE(" %s: E ", __FUNCTION__);
+    if (window == NULL) {
+        LOGW(" Setting NULL preview window ");
+        /* TODO: Current preview window will be invalidated.
+         * Release all the buffers back */
+        // relinquishBuffers();
+    }
+    Mutex::Autolock lock(mStopCallbackLock);
+    mPreviewWindow = window;
+    LOGV(" %s : X ", __FUNCTION__ );
+    return NO_ERROR;
+}
+
+status_t QCameraStream_preview::getBufferFromSurface() {
+    // Allocate/dequeue preview buffers from the ANativeWindow (through the
+    // preview_stream_ops interface), genlock each for write, optionally
+    // import each into ION, and wrap each fd with camera_memory_t.
+    // Already-dequeued buffers are unwound on failure.
+    int err = 0;
+    int numMinUndequeuedBufs = 0;
+    int format = 0;
+    status_t ret = NO_ERROR;
+    int gralloc_usage;
+
+    LOGI(" %s : E ", __FUNCTION__);
+
+    if( mPreviewWindow == NULL) {
+        LOGE("%s: mPreviewWindow = NULL", __func__);
+        return INVALID_OPERATION;
+    }
+    cam_ctrl_dimension_t dim;
+
+    //mDisplayLock.lock();
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+
+    format = mHalCamCtrl->getPreviewFormatInfo().Hal_format;
+    if(ret != NO_ERROR) {
+        LOGE("%s: display format %d is not supported", __func__, dim.prev_format);
+        // NOTE(review): this goto reaches the unlock() at 'end' without the
+        // matching mPreviewMemoryLock.lock() having run — confirm the Mutex
+        // tolerates an unbalanced unlock.
+        goto end;
+    }
+    numMinUndequeuedBufs = 0;
+    if(mPreviewWindow->get_min_undequeued_buffer_count) {
+        err = mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow, &numMinUndequeuedBufs);
+        if (err != 0) {
+            LOGE("get_min_undequeued_buffer_count failed: %s (%d)",
+                 strerror(-err), -err);
+            ret = UNKNOWN_ERROR;
+            goto end;
+        }
+    }
+    mHalCamCtrl->mPreviewMemoryLock.lock();
+    // Total buffers = our preview count plus the window's reserve, plus
+    // extra depth for ZSL when its queue exceeds that reserve.
+    mHalCamCtrl->mPreviewMemory.buffer_count = kPreviewBufferCount + numMinUndequeuedBufs;
+    if(mHalCamCtrl->isZSLMode()) {
+        if(mHalCamCtrl->getZSLQueueDepth() > numMinUndequeuedBufs)
+            mHalCamCtrl->mPreviewMemory.buffer_count +=
+                mHalCamCtrl->getZSLQueueDepth() - numMinUndequeuedBufs;
+    }
+    err = mPreviewWindow->set_buffer_count(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_count );
+    if (err != 0) {
+        LOGE("set_buffer_count failed: %s (%d)",
+             strerror(-err), -err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+    err = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
+                                               dim.display_width, dim.display_height, format);
+    if (err != 0) {
+        LOGE("set_buffers_geometry failed: %s (%d)",
+             strerror(-err), -err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, &mVFEOutputs);
+    if(ret != MM_CAMERA_OK) {
+        LOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE failed");
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    //as software encoder is used to encode 720p, to enhance the performance
+    //cashed pmem is used here
+    if(mVFEOutputs == 1 && dim.display_height == 720)
+        gralloc_usage = CAMERA_GRALLOC_HEAP_ID | CAMERA_GRALLOC_FALLBACK_HEAP_ID;
+    else
+        gralloc_usage = CAMERA_GRALLOC_HEAP_ID | CAMERA_GRALLOC_FALLBACK_HEAP_ID |
+            CAMERA_GRALLOC_CACHING_ID;
+    err = mPreviewWindow->set_usage(mPreviewWindow, gralloc_usage);
+    if(err != 0) {
+        /* set_usage error out */
+        LOGE("%s: set_usage rc = %d", __func__, err);
+        ret = UNKNOWN_ERROR;
+        goto end;
+    }
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_HFR_FRAME_SKIP, &mHFRFrameSkip);
+    if(ret != MM_CAMERA_OK) {
+        LOGE("get parm MM_CAMERA_PARM_HFR_FRAME_SKIP failed");
+        ret = BAD_VALUE;
+        goto end;
+    }
+    for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+        int stride;  // NOTE(review): unused local, kept as-is
+        err = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+                                             &mHalCamCtrl->mPreviewMemory.buffer_handle[cnt],
+                                             &mHalCamCtrl->mPreviewMemory.stride[cnt]);
+        if(!err) {
+            LOGE("%s: dequeue buf hdl =%p", __func__, *mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+            err = mPreviewWindow->lock_buffer(this->mPreviewWindow,
+                                              mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+            // lock the buffer using genlock
+            LOGE("%s: camera call genlock_lock, hdl=%p", __FUNCTION__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]));
+            if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]),
+                                                        GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+                // Genlock failure is tolerated: buffer is marked unlocked.
+                LOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+                mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+                //mHalCamCtrl->mPreviewMemoryLock.unlock();
+                //return -EINVAL;
+            } else {
+                LOGE("%s: genlock_lock_buffer hdl =%p", __FUNCTION__, *mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+                mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_LOCKED;
+            }
+        } else {
+            mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_NOT_OWNED;
+            LOGE("%s: dequeue_buffer idx = %d err = %d", __func__, cnt, err);
+        }
+
+        LOGE("%s: dequeue buf: %p\n", __func__, mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+
+        if(err != 0) {
+            LOGE("%s: dequeue_buffer failed: %s (%d)", __func__,
+                 strerror(-err), -err);
+            ret = UNKNOWN_ERROR;
+            // Unwind: genlock-unlock and cancel every buffer dequeued so far.
+            for(int i = 0; i < cnt; i++) {
+                if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[i]) {
+                    LOGE("%s: camera call genlock_unlock", __FUNCTION__);
+                    if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+                                                                 (*(mHalCamCtrl->mPreviewMemory.buffer_handle[i])))) {
+                        LOGE("%s: genlock_unlock_buffer failed: hdl =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[i])) );
+                        //mHalCamCtrl->mPreviewMemoryLock.unlock();
+                        //return -EINVAL;
+                    } else {
+                        mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_UNLOCKED;
+                    }
+                }
+                if( mHalCamCtrl->mPreviewMemory.local_flag[i] != BUFFER_NOT_OWNED) {
+                    err = mPreviewWindow->cancel_buffer(mPreviewWindow,
+                                                        mHalCamCtrl->mPreviewMemory.buffer_handle[i]);
+                }
+                mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_NOT_OWNED;
+                LOGE("%s: cancel_buffer: hdl =%p", __func__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[i]));
+                mHalCamCtrl->mPreviewMemory.buffer_handle[i] = NULL;
+            }
+            memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+            goto end;
+        }
+
+        mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt] =
+            (struct private_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+#ifdef USE_ION
+        // Import the gralloc buffer's fd into our own ION client.
+        mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+        if (mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt] < 0) {
+            LOGE("%s: failed: could not open ion device\n", __func__);
+        } else {
+            mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt].fd =
+                mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd;
+            if (ioctl(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt],
+                      ION_IOC_IMPORT, &mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt]) < 0)
+                LOGE("ION import failed\n");
+        }
+#endif
+        mHalCamCtrl->mPreviewMemory.camera_memory[cnt] =
+            mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+                                    mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size, 1, (void *)this);
+        LOGE("%s: idx = %d, fd = %d, size = %d, offset = %d", __func__,
+             cnt, mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+             mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size,
+             mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->offset);
+    }
+
+
+    memset(&mHalCamCtrl->mMetadata, 0, sizeof(mHalCamCtrl->mMetadata));
+    memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+
+    LOGI(" %s : X ",__FUNCTION__);
+end:
+    //mDisplayLock.unlock();
+    mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+    return ret;
+}
+
+/* Return all display preview buffers to the preview window (gralloc)
+ * and release per-buffer resources. For each buffer, in order:
+ *   1. ask the camera daemon to unmap the buffer fd,
+ *   2. release the camera_memory_t wrapper,
+ *   3. free the imported ION handle and close its ion device fd,
+ *   4. genlock-unlock the buffer if we still hold the lock,
+ *   5. cancel_buffer() back to the window unless it already owns it.
+ * Finally frees the multi-plane bookkeeping array and zeroes
+ * mPreviewMemory. Always returns NO_ERROR. */
+status_t QCameraStream_preview::putBufferToSurface() {
+ int err = 0;
+ status_t ret = NO_ERROR;
+
+ LOGI(" %s : E ", __FUNCTION__);
+
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+ // NOTE(review): this inner bounds check duplicates the loop
+ // condition and is always true.
+ if (cnt < mHalCamCtrl->mPreviewMemory.buffer_count) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW, cnt, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ LOGE("%s: sending data Msg Failed", __func__);
+ }
+ }
+
+ // NOTE(review): camera_memory[cnt] is not NULL-checked here; this
+ // assumes getBufferFromSurface() fully populated every slot —
+ // confirm against the partial-failure path.
+ mHalCamCtrl->mPreviewMemory.camera_memory[cnt]->release(mHalCamCtrl->mPreviewMemory.camera_memory[cnt]);
+#ifdef USE_ION
+ // Drop the ION import reference taken in getBufferFromSurface(),
+ // then close the per-buffer /dev/ion fd.
+ struct ion_handle_data ion_handle;
+ ion_handle.handle = mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt].handle;
+ if (ioctl(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt], ION_IOC_FREE, &ion_handle)
+ < 0)
+ LOGE("%s: ion free failed\n", __func__);
+ close(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt]);
+#endif
+ // Buffers we genlocked must be unlocked before handing them back;
+ // on genlock failure skip the cancel for this buffer entirely.
+ if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[cnt]) {
+ LOGD("%s: camera call genlock_unlock", __FUNCTION__);
+ if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+ (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])))) {
+ LOGE("%s: genlock_unlock_buffer failed, handle =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])));
+ continue;
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //return -EINVAL;
+ } else {
+
+ LOGD("%s: genlock_unlock_buffer, handle =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])))
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+ }
+ }
+ // Give the buffer back to the preview window unless the window
+ // already owns it (e.g. it was enqueued and never re-dequeued).
+ if( mHalCamCtrl->mPreviewMemory.local_flag[cnt] != BUFFER_NOT_OWNED) {
+ err = mPreviewWindow->cancel_buffer(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+ LOGD("%s: cancel_buffer: hdl =%p", __func__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]));
+ }
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_NOT_OWNED;
+
+ LOGD(" put buffer %d successfully", cnt);
+ }
+
+ if (mDisplayBuf.preview.buf.mp != NULL) {
+ delete[] mDisplayBuf.preview.buf.mp;
+ mDisplayBuf.preview.buf.mp = NULL;
+ }
+
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ // NOTE(review): mPreviewMemory is zeroed AFTER releasing the lock
+ // that protects it — confirm no other thread can race here.
+ memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+ LOGI(" %s : X ",__FUNCTION__);
+ return NO_ERROR;
+}
+
+
+/* Allocate preview buffers from internal heap memory for the
+ * no-display preview path (no preview window attached).
+ * Queries current dimensions, grows the buffer count for ZSL queue
+ * depth when needed, and delegates allocation to initHeapMem().
+ * Returns NO_ERROR on success, NO_MEMORY (or the get_parm error) on
+ * failure. */
+status_t QCameraStream_preview::getBufferNoDisplay( )
+{
+ int err = 0;
+ status_t ret = NO_ERROR;
+ int i, num_planes, frame_len, y_off, cbcr_off;
+ cam_ctrl_dimension_t dim;
+ uint32_t planes[VIDEO_MAX_PLANES];
+
+ LOGI("%s : E ", __FUNCTION__);
+
+ /* Take the lock before anything that can "goto end": the end label
+ * unconditionally unlocks, and the previous code could jump there
+ * before the lock was ever acquired. */
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if(ret != NO_ERROR) {
+ LOGE("%s: display format %d is not supported", __func__, dim.prev_format);
+ goto end;
+ }
+ mHalCamCtrl->mNoDispPreviewMemory.buffer_count = kPreviewBufferCount;
+ if(mHalCamCtrl->isZSLMode()) {
+ /* ZSL needs queue depth + 3 buffers in flight. */
+ if(mHalCamCtrl->getZSLQueueDepth() > kPreviewBufferCount - 3)
+ mHalCamCtrl->mNoDispPreviewMemory.buffer_count =
+ mHalCamCtrl->getZSLQueueDepth() + 3;
+ }
+
+ num_planes = dim.display_frame_offset.num_planes;
+ for ( i = 0; i < num_planes; i++) {
+ planes[i] = dim.display_frame_offset.mp[i].len;
+ }
+
+ /* NOTE(review): frame length/offsets come from picture_frame_offset
+ * while the plane lengths above come from display_frame_offset —
+ * confirm this mix is intentional for the no-display path. */
+ frame_len = dim.picture_frame_offset.frame_len;
+ y_off = dim.picture_frame_offset.mp[0].offset;
+ cbcr_off = dim.picture_frame_offset.mp[1].offset;
+ LOGE("%s: main image: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+ __func__, dim.rotation, y_off, cbcr_off, frame_len,
+ dim.display_width, dim.display_height);
+ if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mNoDispPreviewMemory,
+ mHalCamCtrl->mNoDispPreviewMemory.buffer_count,
+ frame_len, y_off, cbcr_off, MSM_PMEM_MAINIMG,
+ NULL,NULL, num_planes, planes) < 0) {
+ ret = NO_MEMORY;
+ goto end;
+ };
+
+ memset(&mHalCamCtrl->mMetadata, 0, sizeof(mHalCamCtrl->mMetadata));
+ memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+
+ LOGI(" %s : X ",__FUNCTION__);
+end:
+ //mDisplayLock.unlock();
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+ /* Propagate the real status: the old code returned NO_ERROR even
+ * when initHeapMem() had failed with NO_MEMORY, so callers never
+ * saw the allocation failure. */
+ return ret;
+}
+
+/* Tear down the no-display preview buffers: ask the camera daemon to
+ * unmap every buffer fd, release the backing heap memory, and free
+ * the multi-plane bookkeeping array. Always returns NO_ERROR. */
+status_t QCameraStream_preview::freeBufferNoDisplay()
+{
+ int err = 0;
+ status_t ret = NO_ERROR;
+
+ LOGI(" %s : E ", __FUNCTION__);
+
+ //mDisplayLock.lock();
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ /* The former inner "if (cnt < buffer_count)" guard duplicated the
+ * loop condition (always true) and has been dropped. */
+ for (int cnt = 0; cnt < mHalCamCtrl->mNoDispPreviewMemory.buffer_count; cnt++) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+ cnt, mCameraId, CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ LOGE("%s: sending data Msg Failed", __func__);
+ }
+ }
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mNoDispPreviewMemory);
+ memset(&mHalCamCtrl->mNoDispPreviewMemory, 0, sizeof(mHalCamCtrl->mNoDispPreviewMemory));
+ if (mDisplayBuf.preview.buf.mp != NULL) {
+ delete[] mDisplayBuf.preview.buf.mp;
+ mDisplayBuf.preview.buf.mp = NULL;
+ }
+
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ LOGI(" %s : X ",__FUNCTION__);
+ return NO_ERROR;
+}
+
+/* Handle a face-detection ROI event from the camera engine.
+ *  - FD_ROI_TYPE_HEADER: resets the face array and records how many
+ *    per-face DATA events to expect (clamped to MAX_ROI). If zero
+ *    faces were detected, immediately fires the metadata callback so
+ *    the app clears any previously drawn faces.
+ *  - FD_ROI_TYPE_DATA: fills one camera_face_t slot, converting pixel
+ *    coordinates into the [-1000, 1000] camera coordinate space via
+ *    v*2000/display_dim - 1000. Once all expected faces have arrived,
+ *    fires CAMERA_MSG_PREVIEW_METADATA.
+ * Callbacks are invoked with mDisplayLock released to avoid holding a
+ * HAL lock across app code. */
+void QCameraStream_preview::notifyROIEvent(fd_roi_t roi)
+{
+ switch (roi.type) {
+ case FD_ROI_TYPE_HEADER:
+ {
+ mDisplayLock.lock();
+ mNumFDRcvd = 0;
+ memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+ mHalCamCtrl->mMetadata.faces = mHalCamCtrl->mFace;
+ mHalCamCtrl->mMetadata.number_of_faces = roi.d.hdr.num_face_detected;
+ if(mHalCamCtrl->mMetadata.number_of_faces > MAX_ROI)
+ mHalCamCtrl->mMetadata.number_of_faces = MAX_ROI;
+ mDisplayLock.unlock();
+
+ if (mHalCamCtrl->mMetadata.number_of_faces == 0) {
+ // Clear previous faces
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+
+ if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+ LOGE("%s: Face detection RIO callback", __func__);
+ pcb(CAMERA_MSG_PREVIEW_METADATA, NULL, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ }
+ break;
+ case FD_ROI_TYPE_DATA:
+ {
+ mDisplayLock.lock();
+ int idx = roi.d.data.idx;
+ // Drop events that arrive for slots beyond the count announced
+ // in the HEADER event.
+ if (idx >= mHalCamCtrl->mMetadata.number_of_faces) {
+ mDisplayLock.unlock();
+ LOGE("%s: idx %d out of boundary %d", __func__, idx, mHalCamCtrl->mMetadata.number_of_faces);
+ break;
+ }
+
+ mHalCamCtrl->mFace[idx].id = roi.d.data.face.id;
+ mHalCamCtrl->mFace[idx].score = roi.d.data.face.score;
+
+ // rect is filled as [left, top, right, bottom] (the original
+ // comments mislabeled these as top/right/bottom/left):
+ // left = x mapped into [-1000, 1000]
+ mHalCamCtrl->mFace[idx].rect[0] =
+ roi.d.data.face.face_boundary.x*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ // top = y mapped into [-1000, 1000]
+ mHalCamCtrl->mFace[idx].rect[1] =
+ roi.d.data.face.face_boundary.y*2000/mHalCamCtrl->mDimension.display_height - 1000;
+ // right = left + scaled width
+ mHalCamCtrl->mFace[idx].rect[2] = mHalCamCtrl->mFace[idx].rect[0] +
+ roi.d.data.face.face_boundary.dx*2000/mHalCamCtrl->mDimension.display_width;
+ // bottom = top + scaled height
+ mHalCamCtrl->mFace[idx].rect[3] = mHalCamCtrl->mFace[idx].rect[1] +
+ roi.d.data.face.face_boundary.dy*2000/mHalCamCtrl->mDimension.display_height;
+
+ // Center of left eye
+ mHalCamCtrl->mFace[idx].left_eye[0] =
+ roi.d.data.face.left_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].left_eye[1] =
+ roi.d.data.face.left_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+ // Center of right eye
+ mHalCamCtrl->mFace[idx].right_eye[0] =
+ roi.d.data.face.right_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].right_eye[1] =
+ roi.d.data.face.right_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+ // Center of mouth
+ mHalCamCtrl->mFace[idx].mouth[0] =
+ roi.d.data.face.mouth_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].mouth[1] =
+ roi.d.data.face.mouth_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+ mHalCamCtrl->mFace[idx].smile_degree = roi.d.data.face.smile_degree;
+ mHalCamCtrl->mFace[idx].smile_score = roi.d.data.face.smile_confidence;
+ mHalCamCtrl->mFace[idx].blink_detected = roi.d.data.face.blink_detected;
+ mHalCamCtrl->mFace[idx].face_recognised = roi.d.data.face.is_face_recognised;
+ mHalCamCtrl->mFace[idx].gaze_angle = roi.d.data.face.gaze_angle;
+ /* newly added */
+ mHalCamCtrl->mFace[idx].updown_dir = roi.d.data.face.updown_dir;
+ mHalCamCtrl->mFace[idx].leftright_dir = roi.d.data.face.leftright_dir;
+ mHalCamCtrl->mFace[idx].roll_dir = roi.d.data.face.roll_dir;
+ mHalCamCtrl->mFace[idx].leye_blink = roi.d.data.face.left_blink;
+ mHalCamCtrl->mFace[idx].reye_blink = roi.d.data.face.right_blink;
+ mHalCamCtrl->mFace[idx].left_right_gaze = roi.d.data.face.left_right_gaze;
+ mHalCamCtrl->mFace[idx].top_bottom_gaze = roi.d.data.face.top_bottom_gaze;
+ LOGE("%s: Face(%d, %d, %d, %d), leftEye(%d, %d), rightEye(%d, %d), mouth(%d, %d), smile(%d, %d), face_recg(%d)", __func__,
+ mHalCamCtrl->mFace[idx].rect[0], mHalCamCtrl->mFace[idx].rect[1],
+ mHalCamCtrl->mFace[idx].rect[2], mHalCamCtrl->mFace[idx].rect[3],
+ mHalCamCtrl->mFace[idx].left_eye[0], mHalCamCtrl->mFace[idx].left_eye[1],
+ mHalCamCtrl->mFace[idx].right_eye[0], mHalCamCtrl->mFace[idx].right_eye[1],
+ mHalCamCtrl->mFace[idx].mouth[0], mHalCamCtrl->mFace[idx].mouth[1],
+ mHalCamCtrl->mFace[idx].smile_degree, mHalCamCtrl->mFace[idx].smile_score,
+ mHalCamCtrl->mFace[idx].face_recognised);
+ LOGE("%s: gaze(%d, %d, %d), updown(%d), leftright(%d), roll(%d), blink(%d, %d, %d)", __func__,
+ mHalCamCtrl->mFace[idx].gaze_angle, mHalCamCtrl->mFace[idx].left_right_gaze,
+ mHalCamCtrl->mFace[idx].top_bottom_gaze, mHalCamCtrl->mFace[idx].updown_dir,
+ mHalCamCtrl->mFace[idx].leftright_dir, mHalCamCtrl->mFace[idx].roll_dir,
+ mHalCamCtrl->mFace[idx].blink_detected,
+ mHalCamCtrl->mFace[idx].leye_blink, mHalCamCtrl->mFace[idx].reye_blink);
+
+ mNumFDRcvd++;
+ mDisplayLock.unlock();
+
+ // Fire the metadata callback only once the full set announced in
+ // the HEADER event has been received.
+ if (mNumFDRcvd == mHalCamCtrl->mMetadata.number_of_faces) {
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+
+ if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+ LOGE("%s: Face detection RIO callback with %d faces detected (score=%d)", __func__, mNumFDRcvd, mHalCamCtrl->mFace[idx].score);
+ pcb(CAMERA_MSG_PREVIEW_METADATA, NULL, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ }
+ break;
+ }
+}
+
+/* Set up the display preview path: dequeue gralloc buffers from the
+ * preview window (getBufferFromSurface), build the per-buffer
+ * msm_frame / multi-plane descriptors, and map each buffer fd to the
+ * camera daemon. On any failure after buffers were acquired, the
+ * error path returns everything via putBufferToSurface().
+ * Returns NO_ERROR on success, BAD_VALUE/NO_MEMORY/err otherwise. */
+status_t QCameraStream_preview::initDisplayBuffers()
+{
+ status_t ret = NO_ERROR;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ uint32_t frame_len = 0; /* frame planner length */
+ int buffer_num = 4; /* number of buffers for display */
+ const char *pmem_region;
+ uint8_t num_planes = 0;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ void *vaddr = NULL;
+ cam_ctrl_dimension_t dim;
+
+ LOGE("%s:BEGIN",__func__);
+ // Reset all face-metadata / buffer-tracking state from any prior
+ // preview session before acquiring new buffers.
+ memset(&mHalCamCtrl->mMetadata, 0, sizeof(camera_frame_metadata_t));
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ memset(&mNotifyBuffer, 0, sizeof(mNotifyBuffer));
+
+/* get preview size, by qury mm_camera*/
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+
+ memset(&(this->mDisplayStreamBuf),0, sizeof(this->mDisplayStreamBuf));
+
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ LOGE("%s: error - can't get camera dimension!", __func__);
+ LOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }else {
+ width = dim.display_width,
+ height = dim.display_height;
+ }
+
+ // Dequeue + genlock the gralloc buffers from the preview window.
+ ret = getBufferFromSurface();
+ if(ret != NO_ERROR) {
+ LOGE("%s: cannot get memory from surface texture client, ret = %d", __func__, ret);
+ return ret;
+ }
+
+ /* set 4 buffers for display */
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ memset(&mDisplayStreamBuf, 0, sizeof(mDisplayStreamBuf));
+ this->mDisplayStreamBuf.num = mHalCamCtrl->mPreviewMemory.buffer_count;
+ this->myMode=myMode; /*Need to assign this in constructor after translating from mask*/
+ // NOTE(review): the display path hard-codes a 2-plane (Y + CbCr)
+ // layout here, unlike the no-display path which uses
+ // display_frame_offset.num_planes — confirm this is intentional.
+ num_planes = 2;
+ planes[0] = dim.display_frame_offset.mp[0].len;
+ planes[1] = dim.display_frame_offset.mp[1].len;
+ this->mDisplayStreamBuf.frame_len = dim.display_frame_offset.frame_len;
+
+ memset(&mDisplayBuf, 0, sizeof(mDisplayBuf));
+ mDisplayBuf.preview.buf.mp = new mm_camera_mp_buf_t[mDisplayStreamBuf.num];
+ if (!mDisplayBuf.preview.buf.mp) {
+ LOGE("%s Error allocating memory for mplanar struct ", __func__);
+ ret = NO_MEMORY;
+ goto error;
+ }
+ memset(mDisplayBuf.preview.buf.mp, 0,
+ mDisplayStreamBuf.num * sizeof(mm_camera_mp_buf_t));
+
+ /*allocate memory for the buffers*/
+ for(int i = 0; i < mDisplayStreamBuf.num; i++){
+ // Slots that getBufferFromSurface() failed to fill are skipped.
+ if (mHalCamCtrl->mPreviewMemory.private_buffer_handle[i] == NULL)
+ continue;
+ mDisplayStreamBuf.frame[i].fd = mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->fd;
+ mDisplayStreamBuf.frame[i].cbcr_off = planes[0];
+ mDisplayStreamBuf.frame[i].y_off = 0;
+ mDisplayStreamBuf.frame[i].path = OUTPUT_TYPE_P;
+ mHalCamCtrl->mPreviewMemory.addr_offset[i] =
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->offset;
+ mDisplayStreamBuf.frame[i].buffer =
+ (long unsigned int)mHalCamCtrl->mPreviewMemory.camera_memory[i]->data;
+ mDisplayStreamBuf.frame[i].ion_alloc.len = mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size;
+ mDisplayStreamBuf.frame[i].ion_dev_fd = mHalCamCtrl->mPreviewMemory.main_ion_fd[i];
+ mDisplayStreamBuf.frame[i].fd_data = mHalCamCtrl->mPreviewMemory.ion_info_fd[i];
+
+ LOGE("%s: idx = %d, fd = %d, size = %d, cbcr_offset = %d, y_offset = %d, "
+ "offset = %d, vaddr = 0x%x", __func__, i, mDisplayStreamBuf.frame[i].fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+ mDisplayStreamBuf.frame[i].cbcr_off, mDisplayStreamBuf.frame[i].y_off,
+ mHalCamCtrl->mPreviewMemory.addr_offset[i],
+ (uint32_t)mDisplayStreamBuf.frame[i].buffer);
+
+ // Tell the daemon about this buffer fd over the control socket.
+ ret = mHalCamCtrl->sendMappingBuf(
+ MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+ i,
+ mDisplayStreamBuf.frame[i].fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+ mCameraId, CAM_SOCK_MSG_TYPE_FD_MAPPING);
+ if (NO_ERROR != ret) {
+ LOGE("%s: sending mapping data Msg Failed", __func__);
+ goto error;
+ }
+
+ mDisplayBuf.preview.buf.mp[i].frame = mDisplayStreamBuf.frame[i];
+ mDisplayBuf.preview.buf.mp[i].frame_offset = mHalCamCtrl->mPreviewMemory.addr_offset[i];
+ mDisplayBuf.preview.buf.mp[i].num_planes = num_planes;
+
+ /* Plane 0 needs to be set seperately. Set other planes
+ * in a loop. */
+ mDisplayBuf.preview.buf.mp[i].planes[0].length = planes[0];
+ mDisplayBuf.preview.buf.mp[i].planes[0].m.userptr = mDisplayStreamBuf.frame[i].fd;
+ mDisplayBuf.preview.buf.mp[i].planes[0].data_offset = 0;
+ mDisplayBuf.preview.buf.mp[i].planes[0].reserved[0] =
+ mDisplayBuf.preview.buf.mp[i].frame_offset;
+ // Subsequent planes are laid out back-to-back: each plane's
+ // offset is the previous plane's offset plus its length.
+ for (int j = 1; j < num_planes; j++) {
+ mDisplayBuf.preview.buf.mp[i].planes[j].length = planes[j];
+ mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr =
+ mDisplayStreamBuf.frame[i].fd;
+ mDisplayBuf.preview.buf.mp[i].planes[j].data_offset = 0;
+ mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0] =
+ mDisplayBuf.preview.buf.mp[i].planes[j-1].reserved[0] +
+ mDisplayBuf.preview.buf.mp[i].planes[j-1].length;
+ }
+
+ for (int j = 0; j < num_planes; j++)
+ LOGE("Planes: %d length: %d userptr: %lu offset: %d\n", j,
+ mDisplayBuf.preview.buf.mp[i].planes[j].length,
+ mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr,
+ mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0]);
+ }/*end of for loop*/
+
+ /* register the streaming buffers for the channel*/
+ mDisplayBuf.ch_type = MM_CAMERA_CH_PREVIEW;
+ mDisplayBuf.preview.num = mDisplayStreamBuf.num;
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ LOGE("%s:END",__func__);
+ return NO_ERROR;
+
+error:
+ // Unlock first: putBufferToSurface() re-acquires mPreviewMemoryLock.
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ putBufferToSurface();
+
+ LOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Set up the preview path when no display surface is attached:
+ * allocate heap buffers (getBufferNoDisplay), build the per-buffer
+ * msm_frame / multi-plane descriptors, and map each buffer fd to the
+ * camera daemon.
+ * Returns NO_ERROR on success, BAD_VALUE/NO_MEMORY/err otherwise. */
+status_t QCameraStream_preview::initPreviewOnlyBuffers()
+{
+ status_t ret = NO_ERROR;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ uint32_t frame_len = 0; /* frame planner length */
+ int buffer_num = 4; /* number of buffers for display */
+ const char *pmem_region;
+ uint8_t num_planes = 0;
+ uint32_t planes[VIDEO_MAX_PLANES];
+
+ cam_ctrl_dimension_t dim;
+
+ LOGE("%s:BEGIN",__func__);
+ // Reset face metadata and buffer-tracking state from any previous
+ // session before allocating fresh buffers.
+ memset(&mHalCamCtrl->mMetadata, 0, sizeof(camera_frame_metadata_t));
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ memset(&mHalCamCtrl->mNoDispPreviewMemory, 0, sizeof(mHalCamCtrl->mNoDispPreviewMemory));
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ memset(&mNotifyBuffer, 0, sizeof(mNotifyBuffer));
+
+/* get preview size, by qury mm_camera*/
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ LOGE("%s: error - can't get camera dimension!", __func__);
+ LOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }else {
+ width = dim.display_width;
+ height = dim.display_height;
+ }
+
+ ret = getBufferNoDisplay( );
+ if(ret != NO_ERROR) {
+ LOGE("%s: cannot get memory from surface texture client, ret = %d", __func__, ret);
+ return ret;
+ }
+
+ /* set 4 buffers for display */
+ memset(&mDisplayStreamBuf, 0, sizeof(mDisplayStreamBuf));
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ this->mDisplayStreamBuf.num = mHalCamCtrl->mNoDispPreviewMemory.buffer_count;
+ this->myMode=myMode; /*Need to assign this in constructor after translating from mask*/
+ num_planes = dim.display_frame_offset.num_planes;
+ for (int i = 0; i < num_planes; i++) {
+ planes[i] = dim.display_frame_offset.mp[i].len;
+ }
+ this->mDisplayStreamBuf.frame_len = dim.display_frame_offset.frame_len;
+ /* Record the frame length for the diagnostic log below (it was
+ * previously left at 0, making the logged size meaningless). */
+ frame_len = dim.display_frame_offset.frame_len;
+
+ memset(&mDisplayBuf, 0, sizeof(mDisplayBuf));
+ mDisplayBuf.preview.buf.mp = new mm_camera_mp_buf_t[mDisplayStreamBuf.num];
+ if (!mDisplayBuf.preview.buf.mp) {
+ LOGE("%s Error allocating memory for mplanar struct ", __func__);
+ /* Bail out instead of falling through and memset()ing /
+ * dereferencing a NULL pointer (the old code only logged).
+ * Release the lock taken above before returning. */
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ return NO_MEMORY;
+ }
+ memset(mDisplayBuf.preview.buf.mp, 0,
+ mDisplayStreamBuf.num * sizeof(mm_camera_mp_buf_t));
+
+ /*allocate memory for the buffers*/
+ for(int i = 0; i < mDisplayStreamBuf.num; i++){
+ // Skip slots the heap allocator failed to populate.
+ if (mHalCamCtrl->mNoDispPreviewMemory.camera_memory[i] == NULL)
+ continue;
+ mDisplayStreamBuf.frame[i].fd = mHalCamCtrl->mNoDispPreviewMemory.fd[i];
+ mDisplayStreamBuf.frame[i].cbcr_off = planes[0];
+ mDisplayStreamBuf.frame[i].y_off = 0;
+ mDisplayStreamBuf.frame[i].path = OUTPUT_TYPE_P;
+ mDisplayStreamBuf.frame[i].buffer =
+ (long unsigned int)mHalCamCtrl->mNoDispPreviewMemory.camera_memory[i]->data;
+ mDisplayStreamBuf.frame[i].ion_dev_fd = mHalCamCtrl->mNoDispPreviewMemory.main_ion_fd[i];
+ mDisplayStreamBuf.frame[i].fd_data = mHalCamCtrl->mNoDispPreviewMemory.ion_info_fd[i];
+
+ LOGE("%s: idx = %d, fd = %d, size = %d, cbcr_offset = %d, y_offset = %d, "
+ "vaddr = 0x%x", __func__, i, mDisplayStreamBuf.frame[i].fd,
+ frame_len,
+ mDisplayStreamBuf.frame[i].cbcr_off, mDisplayStreamBuf.frame[i].y_off,
+ (uint32_t)mDisplayStreamBuf.frame[i].buffer);
+
+ // Tell the daemon about this buffer fd over the control socket.
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(
+ MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+ i,
+ mDisplayStreamBuf.frame[i].fd,
+ mHalCamCtrl->mNoDispPreviewMemory.size,
+ mCameraId, CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ LOGE("%s: sending mapping data Msg Failed", __func__);
+ }
+
+ mDisplayBuf.preview.buf.mp[i].frame = mDisplayStreamBuf.frame[i];
+ mDisplayBuf.preview.buf.mp[i].frame_offset = mDisplayStreamBuf.frame[i].y_off;
+ mDisplayBuf.preview.buf.mp[i].num_planes = num_planes;
+
+ /* Plane 0 needs to be set seperately. Set other planes
+ * in a loop. */
+ mDisplayBuf.preview.buf.mp[i].planes[0].length = planes[0];
+ mDisplayBuf.preview.buf.mp[i].planes[0].m.userptr = mDisplayStreamBuf.frame[i].fd;
+ mDisplayBuf.preview.buf.mp[i].planes[0].data_offset = 0;
+ mDisplayBuf.preview.buf.mp[i].planes[0].reserved[0] =
+ mDisplayBuf.preview.buf.mp[i].frame_offset;
+ // Subsequent planes are laid out back-to-back after plane 0.
+ for (int j = 1; j < num_planes; j++) {
+ mDisplayBuf.preview.buf.mp[i].planes[j].length = planes[j];
+ mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr =
+ mDisplayStreamBuf.frame[i].fd;
+ mDisplayBuf.preview.buf.mp[i].planes[j].data_offset = 0;
+ mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0] =
+ mDisplayBuf.preview.buf.mp[i].planes[j-1].reserved[0] +
+ mDisplayBuf.preview.buf.mp[i].planes[j-1].length;
+ }
+
+ for (int j = 0; j < num_planes; j++)
+ LOGE("Planes: %d length: %d userptr: %lu offset: %d\n", j,
+ mDisplayBuf.preview.buf.mp[i].planes[j].length,
+ mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr,
+ mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0]);
+ }/*end of for loop*/
+
+ /* register the streaming buffers for the channel*/
+ mDisplayBuf.ch_type = MM_CAMERA_CH_PREVIEW;
+ mDisplayBuf.preview.num = mDisplayStreamBuf.num;
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ LOGE("%s:END",__func__);
+ return NO_ERROR;
+ /* The former unreachable "end:" block (dead code after the return,
+ * with no goto targeting it) and the unused local 'vaddr' have been
+ * removed. */
+}
+
+
+/* Debug helper: dump the first 100 preview frames to
+ * /data/mzhu<count>.yuv as raw YUV420 (w*h*3/2 bytes). The frame's
+ * mapped virtual address is read through newFrame->buffer. Failures
+ * are logged and the dump for that frame is skipped. */
+void QCameraStream_preview::dumpFrameToFile(struct msm_frame* newFrame)
+{
+ char buf[32];
+ int w, h;
+ static int count = 0;
+ cam_ctrl_dimension_t dim;
+ int file_fd;
+ int rc = 0;
+ int len;
+ unsigned long addr;
+ unsigned long * tmp = (unsigned long *)newFrame->buffer;
+ addr = *tmp;
+ status_t ret = cam_config_get_parm(mHalCamCtrl->mCameraId,
+ MM_CAMERA_PARM_DIMENSION, &dim);
+ /* Previously 'ret' was ignored: on failure 'dim' is undefined and
+ * the computed length would be garbage. Skip the dump instead. */
+ if (ret != NO_ERROR) {
+ LOGE("%s: can't get dimension, skip dump", __func__);
+ return;
+ }
+
+ w = dim.display_width;
+ h = dim.display_height;
+ len = (w * h)*3/2;
+ count++;
+ if(count < 100) {
+ snprintf(buf, sizeof(buf), "/data/mzhu%d.yuv", count);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ /* Previously open() was unchecked, so write()/close() ran on
+ * fd -1 when /data was not writable. */
+ if (file_fd < 0) {
+ LOGE("%s: cannot open %s for dump", __func__, buf);
+ return;
+ }
+ rc = write(file_fd, (const void *)addr, len);
+ LOGE("%s: file='%s', vaddr_old=0x%x, addr_map = 0x%p, len = %d, rc = %d",
+ __func__, buf, (uint32_t)newFrame->buffer, (void *)addr, len, rc);
+ close(file_fd);
+ LOGE("%s: dump %s, rc = %d, len = %d", __func__, buf, rc, len);
+ }
+}
+
+/* Per-frame handler for the display preview path. For each frame:
+ *   1. genlock-unlock the filled buffer, clean its cache (ION),
+ *   2. enqueue it to the preview window (with HFR frame skipping),
+ *   3. dequeue + lock a replacement buffer and return it to the
+ *      camera engine via cam_evt_buf_done(),
+ *   4. deliver CAMERA_MSG_PREVIEW_FRAME and, when recording with a
+ *      single VFE output, CAMERA_MSG_VIDEO_FRAME callbacks.
+ * mStopCallbackLock is deliberately released around app callbacks to
+ * avoid deadlock with stop-preview. Returns NO_ERROR, or BAD_VALUE
+ * when the HAL controller is unset. */
+status_t QCameraStream_preview::processPreviewFrameWithDisplay(
+ mm_camera_ch_data_buf_t *frame)
+{
+ LOGV("%s",__func__);
+ int err = 0;
+ int msgType = 0;
+ int i;
+ camera_memory_t *data = NULL;
+ camera_frame_metadata_t *metadata = NULL;
+
+ Mutex::Autolock lock(mStopCallbackLock);
+ if(!mActive) {
+ LOGE("Preview Stopped. Returning callback");
+ return NO_ERROR;
+ }
+
+ if(mHalCamCtrl==NULL) {
+ LOGE("%s: X: HAL control object not set",__func__);
+ /*Call buf done*/
+ return BAD_VALUE;
+ }
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_timestamp_callback rcb = mHalCamCtrl->mDataCbTimestamp;
+ void *rdata = mHalCamCtrl->mCallbackCookie;
+ mHalCamCtrl->mCallbackLock.unlock();
+ // Frame timestamp from the driver's timeval, as nanoseconds.
+ nsecs_t timeStamp = seconds_to_nanoseconds(frame->def.frame->ts.tv_sec) ;
+ timeStamp += frame->def.frame->ts.tv_nsec;
+
+ if(mFirstFrameRcvd == false) {
+ mm_camera_util_profile("HAL: First preview frame received");
+ mFirstFrameRcvd = true;
+ }
+
+ if (UNLIKELY(mHalCamCtrl->mDebugFps)) {
+ mHalCamCtrl->debugShowPreviewFPS();
+ }
+ //dumpFrameToFile(frame->def.frame);
+ mHalCamCtrl->dumpFrameToFile(frame->def.frame, HAL_DUMP_FRM_PREVIEW);
+
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ // Remember this frame so it can be handed back to the engine when
+ // its buffer slot is re-dequeued from the window below.
+ mNotifyBuffer[frame->def.idx] = *frame;
+
+ LOGI("Enqueue buf handle %p\n",
+ mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ LOGD("%s: camera call genlock_unlock", __FUNCTION__);
+ // The window requires the buffer to be genlock-unlocked before it
+ // is enqueued; failure is logged but the enqueue proceeds anyway.
+ if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx]) {
+ LOGD("%s: genlock_unlock_buffer hdl =%p", __FUNCTION__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]));
+ if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t*)
+ (*mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]))) {
+ LOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //return -EINVAL;
+ } else {
+ mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx] = BUFFER_UNLOCKED;
+ }
+ } else {
+ LOGE("%s: buffer to be enqueued is not locked", __FUNCTION__);
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //return -EINVAL;
+ }
+
+#ifdef USE_ION
+ // Write back CPU cache lines before the display consumes the buffer.
+ struct ion_flush_data cache_inv_data;
+ int ion_fd;
+ ion_fd = frame->def.frame->ion_dev_fd;
+ cache_inv_data.vaddr = (void *)frame->def.frame->buffer;
+ cache_inv_data.fd = frame->def.frame->fd;
+ cache_inv_data.handle = frame->def.frame->fd_data.handle;
+ cache_inv_data.length = frame->def.frame->ion_alloc.len;
+
+ if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_CACHES) < 0)
+ LOGE("%s: Cache clean for Preview buffer %p fd = %d failed", __func__,
+ cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
+ // High-frame-rate recording: only every Nth frame is displayed;
+ // skipped frames are cancelled back to the window instead.
+ if(mHFRFrameSkip == 1)
+ {
+ const char *str = mHalCamCtrl->mParameters.get(
+ CameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+ if(str != NULL){
+ int is_hfr_off = 0;
+ mHFRFrameCnt++;
+ if(!strcmp(str, CameraParameters::VIDEO_HFR_OFF)) {
+ is_hfr_off = 1;
+ err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ } else if (!strcmp(str, CameraParameters::VIDEO_HFR_2X)) {
+ mHFRFrameCnt %= 2;
+ } else if (!strcmp(str, CameraParameters::VIDEO_HFR_3X)) {
+ mHFRFrameCnt %= 3;
+ } else if (!strcmp(str, CameraParameters::VIDEO_HFR_4X)) {
+ mHFRFrameCnt %= 4;
+ }
+ if(mHFRFrameCnt == 0)
+ err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ else if(!is_hfr_off)
+ err = this->mPreviewWindow->cancel_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ } else
+ err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ } else {
+ err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ }
+ if(err != 0) {
+ LOGE("%s: enqueue_buffer failed, err = %d", __func__, err);
+ } else {
+ LOGD("%s: enqueue_buffer hdl=%p", __func__, *mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx] = BUFFER_NOT_OWNED;
+ }
+ // Dequeue a free buffer from the window, match it to our slot by
+ // handle, genlock-lock it, and hand it back to the camera engine.
+ buffer_handle_t *buffer_handle = NULL;
+ int tmp_stride = 0;
+ err = this->mPreviewWindow->dequeue_buffer(this->mPreviewWindow,
+ &buffer_handle, &tmp_stride);
+ if (err == NO_ERROR && buffer_handle != NULL) {
+
+ LOGD("%s: dequed buf hdl =%p", __func__, *buffer_handle);
+ for(i = 0; i < mHalCamCtrl->mPreviewMemory.buffer_count; i++) {
+ if(mHalCamCtrl->mPreviewMemory.buffer_handle[i] == buffer_handle) {
+ mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_UNLOCKED;
+ break;
+ }
+ }
+ if (i < mHalCamCtrl->mPreviewMemory.buffer_count ) {
+ err = this->mPreviewWindow->lock_buffer(this->mPreviewWindow, buffer_handle);
+ LOGD("%s: camera call genlock_lock: hdl =%p", __FUNCTION__, *buffer_handle);
+ if (GENLOCK_FAILURE == genlock_lock_buffer((native_handle_t*)(*buffer_handle), GENLOCK_WRITE_LOCK,
+ GENLOCK_MAX_TIMEOUT)) {
+ LOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ // return -EINVAL;
+ } else {
+ mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_LOCKED;
+
+ if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mNotifyBuffer[i])) {
+ LOGE("BUF DONE FAILED");
+ }
+ }
+ }
+ } else
+ LOGE("%s: error in dequeue_buffer, enqueue_buffer idx = %d, no free buffer now", __func__, frame->def.idx);
+ /* Save the last displayed frame. We'll be using it to fill the gap between
+ when preview stops and postview start during snapshot.*/
+ mLastQueuedFrame = &(mDisplayStreamBuf.frame[frame->def.idx]);
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+ LOGD("Message enabled = 0x%x", mHalCamCtrl->mMsgEnabled);
+
+ camera_memory_t *previewMem = NULL;
+
+ if (pcb != NULL) {
+ LOGD("%s: mMsgEnabled =0x%x, preview format =%d", __func__,
+ mHalCamCtrl->mMsgEnabled, mHalCamCtrl->mPreviewFormat);
+ //Sending preview callback if corresponding Msgs are enabled
+ if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+ LOGE("Q%s: PCB callback enabled", __func__);
+ msgType |= CAMERA_MSG_PREVIEW_FRAME;
+ int previewBufSize;
+ /* The preview buffer size sent back in the callback should be (width*height*bytes_per_pixel)
+ * As all preview formats we support, use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
+ * We need to put a check if some other formats are supported in future. (punits) */
+ if((mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV21) || (mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV12) ||
+ (mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_YV12))
+ {
+ previewBufSize = mHalCamCtrl->mPreviewWidth * mHalCamCtrl->mPreviewHeight * 3/2;
+ // If gralloc over-allocated, wrap the fd in a fresh
+ // camera_memory_t of the exact expected size for the app.
+ if(previewBufSize != mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->size) {
+ previewMem = mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->fd,
+ previewBufSize, 1, mHalCamCtrl->mCallbackCookie);
+ if (!previewMem || !previewMem->data) {
+ LOGE("%s: mGetMemory failed.\n", __func__);
+ } else {
+ data = previewMem;
+ }
+ } else
+ data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];
+ } else {
+ data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];
+ LOGE("Invalid preview format, buffer size in preview callback may be wrong.");
+ }
+ } else {
+ data = NULL;
+ }
+ if(msgType) {
+ // Release the stop lock while calling into the application.
+ mStopCallbackLock.unlock();
+ if(mActive)
+ pcb(msgType, data, 0, metadata, mHalCamCtrl->mCallbackCookie);
+ if (previewMem)
+ previewMem->release(previewMem);
+ }
+ LOGD("end of cb");
+ } else {
+ LOGD("%s PCB is not enabled", __func__);
+ }
+ // Single VFE output: the preview buffer doubles as the video frame,
+ // so deliver the recording callback from here and wait for the app
+ // to release the frame before reusing it.
+ if(rcb != NULL && mVFEOutputs == 1)
+ {
+ int flagwait = 1;
+ if(mHalCamCtrl->mStartRecording == true &&
+ ( mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME))
+ {
+ if (mHalCamCtrl->mStoreMetaDataInFrame)
+ {
+ if(mHalCamCtrl->mRecordingMemory.metadata_memory[frame->def.idx])
+ {
+ flagwait = 1;
+ mStopCallbackLock.unlock();
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+ mHalCamCtrl->mRecordingMemory.metadata_memory[frame->def.idx],
+ 0, mHalCamCtrl->mCallbackCookie);
+ }else
+ flagwait = 0;
+ }
+ else
+ {
+ mStopCallbackLock.unlock();
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+ mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx],
+ 0, mHalCamCtrl->mCallbackCookie);
+ }
+
+ if(flagwait){
+ Mutex::Autolock rLock(&mHalCamCtrl->mRecordFrameLock);
+ if (mHalCamCtrl->mReleasedRecordingFrame != true) {
+ mHalCamCtrl->mRecordWait.wait(mHalCamCtrl->mRecordFrameLock);
+ }
+ mHalCamCtrl->mReleasedRecordingFrame = false;
+ }
+ }
+ }
+ /* Save the last displayed frame. We'll be using it to fill the gap between
+ when preview stops and postview start during snapshot.*/
+ //mLastQueuedFrame = frame->def.frame;
+/*
+ if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, frame))
+ {
+ LOGE("BUF DONE FAILED");
+ return BAD_VALUE;
+ }
+*/
+ return NO_ERROR;
+}
+
+
+/* Deliver one preview frame to the application in no-display mode (no
+ * surface involved; only the data callback is used). Sends
+ * CAMERA_MSG_PREVIEW_FRAME and/or CAMERA_MSG_PREVIEW_METADATA when enabled,
+ * then returns the buffer to the driver via cam_evt_buf_done().
+ * Returns NO_ERROR normally (a frame arriving after stop is silently
+ * dropped); BAD_VALUE only when the HAL control object is missing.
+ * NOTE(review): cam_evt_buf_done() is reached only inside the
+ * "pcb != NULL" branch, so frames received while no data callback is
+ * registered are never returned to the driver -- verify this cannot starve
+ * the preview buffer queue.
+ * NOTE(review): mStopCallbackLock is unlocked manually before invoking the
+ * callback while Mutex::Autolock also owns it -- confirm the lock is
+ * recursive or that this pairing is intentional. */
+status_t QCameraStream_preview::processPreviewFrameWithOutDisplay(
+  mm_camera_ch_data_buf_t *frame)
+{
+  LOGV("%s",__func__);
+  int err = 0;
+  int msgType = 0;
+  int i;
+  camera_memory_t *data = NULL;
+  camera_frame_metadata_t *metadata = NULL;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mActive) {
+    LOGE("Preview Stopped. Returning callback");
+    return NO_ERROR;
+  }
+  if(mHalCamCtrl==NULL) {
+    LOGE("%s: X: HAL control object not set",__func__);
+    /*Call buf done*/
+    return BAD_VALUE;
+  }
+
+  if (UNLIKELY(mHalCamCtrl->mDebugFps)) {
+    mHalCamCtrl->debugShowPreviewFPS();
+  }
+  //dumpFrameToFile(frame->def.frame);
+  mHalCamCtrl->dumpFrameToFile(frame->def.frame, HAL_DUMP_FRM_PREVIEW);
+
+  /* Remember this buffer so it can be handed back to the driver below. */
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  mNotifyBuffer[frame->def.idx] = *frame;
+
+  /* Save the last displayed frame. We'll be using it to fill the gap between
+     when preview stops and postview start during snapshot.*/
+  mLastQueuedFrame = &(mDisplayStreamBuf.frame[frame->def.idx]);
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+  /* Snapshot the data-callback pointer under its lock; invoked later
+   * outside any HAL lock. */
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_callback pcb = mHalCamCtrl->mDataCb;
+  mHalCamCtrl->mCallbackLock.unlock();
+  LOGD("Message enabled = 0x%x", mHalCamCtrl->mMsgEnabled);
+
+  camera_memory_t *previewMem = NULL;
+  int previewWidth, previewHeight;
+  mHalCamCtrl->mParameters.getPreviewSize(&previewWidth, &previewHeight);
+
+#ifdef USE_ION
+  /* The camera hardware wrote this ION buffer; clean the CPU cache before
+   * the application reads it. Failure is logged but not fatal. */
+  struct ion_flush_data cache_inv_data;
+  int ion_fd;
+  ion_fd = frame->def.frame->ion_dev_fd;
+  cache_inv_data.vaddr = (void *)frame->def.frame->buffer;
+  cache_inv_data.fd = frame->def.frame->fd;
+  cache_inv_data.handle = frame->def.frame->fd_data.handle;
+  cache_inv_data.length = frame->def.frame->ion_alloc.len;
+
+  if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_CACHES) < 0)
+    LOGE("%s: Cache clean for Preview buffer %p fd = %d failed", __func__,
+      cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
+  if (pcb != NULL) {
+    //Sending preview callback if corresponding Msgs are enabled
+    if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+      msgType |= CAMERA_MSG_PREVIEW_FRAME;
+      int previewBufSize;
+      /* For CTS : Forcing preview memory buffer lenth to be
+         'previewWidth * previewHeight * 3/2'.
+         Needed when gralloc allocated extra memory.*/
+      //Can add this check for other formats as well.
+      if( mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV21) {
+        previewBufSize = previewWidth * previewHeight * 3/2;
+        /* If gralloc over-allocated, wrap the fd in a correctly sized
+         * camera_memory_t just for this callback. */
+        if(previewBufSize != mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->size) {
+          previewMem = mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->fd,
+            previewBufSize, 1, mHalCamCtrl->mCallbackCookie);
+          if (!previewMem || !previewMem->data) {
+            LOGE("%s: mGetMemory failed.\n", __func__);
+          } else {
+            data = previewMem;
+          }
+        } else
+          data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];//mPreviewHeap->mBuffers[frame->def.idx];
+      } else
+        data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];//mPreviewHeap->mBuffers[frame->def.idx];
+    } else {
+      data = NULL;
+    }
+
+    if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA){
+      msgType |= CAMERA_MSG_PREVIEW_METADATA;
+      metadata = &mHalCamCtrl->mMetadata;
+    } else {
+      metadata = NULL;
+    }
+    if(msgType) {
+      /* Drop the stop lock while calling into the app to avoid deadlock. */
+      mStopCallbackLock.unlock();
+      if(mActive)
+        pcb(msgType, data, 0, metadata, mHalCamCtrl->mCallbackCookie);
+      if (previewMem)
+        previewMem->release(previewMem);
+    }
+
+    /* Return the buffer to the driver for refill. */
+    if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mNotifyBuffer[frame->def.idx])) {
+      LOGE("BUF DONE FAILED");
+    }
+
+    LOGD("end of cb");
+  }
+
+  return NO_ERROR;
+}
+
+/* Dispatch an incoming preview frame to the no-display or the display
+ * processing path, depending on the HAL's current mode. */
+status_t QCameraStream_preview::processPreviewFrame (
+  mm_camera_ch_data_buf_t *frame)
+{
+  return mHalCamCtrl->isNoDisplayMode()
+      ? processPreviewFrameWithOutDisplay(frame)
+      : processPreviewFrameWithDisplay(frame);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Constructor: initialize bookkeeping members; mHalCamCtrl is injected by
+ * the owner after construction.
+ * NOTE(review): entry/exit traces use LOGE here while the destructor uses
+ * LOGV -- presumably leftover debugging; confirm intended log level. */
+QCameraStream_preview::
+QCameraStream_preview(int cameraId, camera_mode_t mode)
+  : QCameraStream(cameraId,mode),
+    mLastQueuedFrame(NULL),
+    mNumFDRcvd(0),
+    mFirstFrameRcvd(false)
+  {
+    mHalCamCtrl = NULL;
+    LOGE("%s: E", __func__);
+    LOGE("%s: X", __func__);
+  }
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Destructor: stop streaming if still active, then release channel
+ * resources if still initialized, and clear both state flags. */
+QCameraStream_preview::~QCameraStream_preview() {
+  LOGV("%s: E", __func__);
+  if(mActive) {
+    stop();
+  }
+  if(mInit) {
+    release();
+  }
+  mInit = false;
+  mActive = false;
+  LOGV("%s: X", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Initialize the preview stream: acquire the native preview channel and
+ * register preview_notify_cb to receive every preview buffer
+ * (MM_CAMERA_REG_BUF_CB_INFINITE). Marks the stream initialized on success.
+ * Returns the initChannel() status (NO_ERROR on success).
+ * Cleanup: removed the unused locals buffer_handle and tmp_stride that were
+ * declared but never referenced. */
+status_t QCameraStream_preview::init() {
+
+  status_t ret = NO_ERROR;
+  LOGV("%s: E", __func__);
+
+  ret = QCameraStream::initChannel (mCameraId, MM_CAMERA_CH_PREVIEW_MASK);
+  if (NO_ERROR!=ret) {
+    LOGE("%s E: can't init native cammera preview ch\n",__func__);
+    return ret;
+  }
+
+  LOGE("Debug : %s : initChannel",__func__);
+  /* register a notify into the mm_camera_t object; stays active until it
+   * is explicitly cleared in release() */
+  (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                     preview_notify_cb,
+                                     MM_CAMERA_REG_BUF_CB_INFINITE,
+                                     0,this);
+  LOGE("Debug : %s : cam_evt_register_buf_notify",__func__);
+  mInit = true;
+  return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Start preview streaming:
+ *  1. set the channel format,
+ *  2. allocate buffers (no-display or display path),
+ *  3. register the buffers with the camera daemon,
+ *  4. set the operation mode (VIDEO, or skipped for ZSL which is set in
+ *     CamCtrl),
+ *  5. kick off MM_CAMERA_OPS_PREVIEW.
+ * Returns NO_ERROR on success, BAD_VALUE on any failure.
+ * NOTE(review): the error path always calls putBufferToSurface(), even in
+ * no-display mode where buffers came from initPreviewOnlyBuffers() --
+ * confirm this is the right cleanup for that mode. */
+status_t QCameraStream_preview::start()
+{
+  LOGV("%s: E", __func__);
+  status_t ret = NO_ERROR;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+
+  /* call start() in parent class to start the monitor thread*/
+  //QCameraStream::start ();
+  setFormat(MM_CAMERA_CH_PREVIEW_MASK);
+
+  /* Allocate preview buffers appropriate to the current display mode. */
+  if (mHalCamCtrl->isNoDisplayMode()) {
+    if(NO_ERROR!=initPreviewOnlyBuffers()){
+      return BAD_VALUE;
+    }
+  } else {
+    if(NO_ERROR!=initDisplayBuffers()){
+      return BAD_VALUE;
+    }
+  }
+  LOGE("Debug : %s : initDisplayBuffers",__func__);
+
+  /* Register (map) the display buffers with the camera daemon. */
+  ret = cam_config_prepare_buf(mCameraId, &mDisplayBuf);
+  LOGE("Debug : %s : cam_config_prepare_buf",__func__);
+  if(ret != MM_CAMERA_OK) {
+    LOGV("%s:reg preview buf err=%d\n", __func__, ret);
+    ret = BAD_VALUE;
+    goto error;
+  }else {
+    ret = NO_ERROR;
+  }
+
+  /* For preview, the OP_MODE we set is dependent upon whether we are
+     starting camera or camcorder. For snapshot, anyway we disable preview.
+     However, for ZSL we need to set OP_MODE to OP_MODE_ZSL and not
+     OP_MODE_VIDEO. We'll set that for now in CamCtrl. So in case of
+     ZSL we skip setting Mode here */
+
+  if (!(myMode & CAMERA_ZSL_MODE)) {
+    LOGE("Setting OP MODE to MM_CAMERA_OP_MODE_VIDEO");
+    mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_VIDEO;
+    ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+                               &op_mode);
+    LOGE("OP Mode Set");
+
+    if(MM_CAMERA_OK != ret) {
+      LOGE("%s: X :set mode MM_CAMERA_OP_MODE_VIDEO err=%d\n", __func__, ret);
+      ret = BAD_VALUE;
+      goto error;
+    }
+  }else {
+    LOGE("Setting OP MODE to MM_CAMERA_OP_MODE_ZSL");
+    mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_ZSL;
+    ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+                               &op_mode);
+    if(MM_CAMERA_OK != ret) {
+      LOGE("%s: X :set mode MM_CAMERA_OP_MODE_ZSL err=%d\n", __func__, ret);
+      ret = BAD_VALUE;
+      goto error;
+    }
+  }
+
+  /* call mm_camera action start(...) */
+  LOGE("Starting Preview/Video Stream. ");
+  mFirstFrameRcvd = false;
+  ret = cam_ops_action(mCameraId, TRUE, MM_CAMERA_OPS_PREVIEW, 0);
+
+  if (MM_CAMERA_OK != ret) {
+    LOGE ("%s: preview streaming start err=%d\n", __func__, ret);
+    ret = BAD_VALUE;
+    goto error;
+  }
+
+  LOGE("Debug : %s : Preview streaming Started",__func__);
+  ret = NO_ERROR;
+
+  mActive = true;
+  goto end;
+
+error:
+  /* Undo buffer registration/allocation on any failure after allocation. */
+  putBufferToSurface();
+end:
+  LOGE("%s: X", __func__);
+  return ret;
+  }
+
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+// Stop preview streaming: halt MM_CAMERA_OPS_PREVIEW, unregister (unmap)
+// the preview buffers from the daemon, and free them via the path matching
+// the current display mode. No-op if the stream is not active.
+ void QCameraStream_preview::stop() {
+   LOGE("%s: E", __func__);
+   int ret=MM_CAMERA_OK;
+
+   if(!mActive) {
+     return;
+   }
+   Mutex::Autolock lock(mStopCallbackLock);
+   /* Clear mActive first so in-flight frame callbacks drop out early. */
+   mActive = false;
+   /* unregister the notify fn from the mm_camera_t object*/
+
+   LOGI("%s: Stop the thread \n", __func__);
+   /* call stop() in parent class to stop the monitor thread*/
+   ret = cam_ops_action(mCameraId, FALSE, MM_CAMERA_OPS_PREVIEW, 0);
+   if(MM_CAMERA_OK != ret) {
+     LOGE ("%s: camera preview stop err=%d\n", __func__, ret);
+   }
+   ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_PREVIEW);
+   if(ret != MM_CAMERA_OK) {
+     LOGE("%s:Unreg preview buf err=%d\n", __func__, ret);
+     //ret = BAD_VALUE;
+   }
+
+   /* In case of a clean stop, we need to clean all buffers*/
+   LOGE("Debug : %s : Buffer Unprepared",__func__);
+   /*free camera_memory handles and return buffer back to surface*/
+   if (! mHalCamCtrl->isNoDisplayMode() ) {
+     putBufferToSurface();
+   } else {
+     freeBufferNoDisplay( );
+   }
+
+   LOGE("%s: X", __func__);
+
+ }
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+// Release the preview stream: stop it if still active, deinit the preview
+// channel, and clear the buffer-notify registration installed by init().
+// No-op if the stream was never initialized.
+ void QCameraStream_preview::release() {
+
+   LOGE("%s : BEGIN",__func__);
+   int ret=MM_CAMERA_OK,i;
+
+   if(!mInit)
+   {
+     LOGE("%s : Stream not Initalized",__func__);
+     return;
+   }
+
+   if(mActive) {
+     this->stop();
+   }
+
+   ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_PREVIEW);
+   LOGE("Debug : %s : De init Channel",__func__);
+   if(ret != MM_CAMERA_OK) {
+     LOGE("%s:Deinit preview channel failed=%d\n", __func__, ret);
+     //ret = BAD_VALUE;
+   }
+
+   /* Clear the callback registered in init() by registering NULL. */
+   (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                     NULL,
+                                     (mm_camera_register_buf_cb_type_t)NULL,
+                                     NULL,
+                                     NULL);
+   mInit = false;
+   LOGE("%s: END", __func__);
+
+ }
+
+/* Factory: hand back a freshly constructed preview stream object. */
+QCameraStream*
+QCameraStream_preview::createInstance(int cameraId,
+                                      camera_mode_t mode)
+{
+  return new QCameraStream_preview(cameraId, mode);
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Factory counterpart of createInstance(): release stream resources and
+ * destroy the object. Safe to call with NULL.
+ * Cleanup: the old "p = NULL" after delete only cleared the local copy of
+ * the pointer (no caller-visible effect), so that dead store was dropped. */
+void QCameraStream_preview::deleteInstance(QCameraStream *p)
+{
+  if (p){
+    LOGV("%s: BEGIN", __func__);
+    p->release();
+    delete p;
+    LOGV("%s: END", __func__);
+  }
+}
+
+
+/* Temp helper function */
+/* Return the most recently queued preview frame pointer (NULL before the
+ * first frame); used to bridge the preview-to-postview gap on snapshot. */
+void *QCameraStream_preview::getLastQueuedFrame(void)
+{
+  return mLastQueuedFrame;
+}
+
+// ---------------------------------------------------------------------------
+// No code beyond this line
+// ---------------------------------------------------------------------------
+}; // namespace android
diff --git a/camera/QCamera/HAL/core/src/QCameraHWI_Record.cpp b/camera/QCamera/HAL/core/src/QCameraHWI_Record.cpp
new file mode 100644
index 0000000..c059c1a
--- /dev/null
+++ b/camera/QCamera/HAL/core/src/QCameraHWI_Record.cpp
@@ -0,0 +1,582 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Record"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraStream.h"
+
+
+/* Branch-prediction hints wrapping GCC's __builtin_expect. */
+#define LIKELY(exp) __builtin_expect(!!(exp), 1)
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraStream_record class implementation goes here*/
+/* following code implement the video streaming capture & encoding logic of this class*/
+// ---------------------------------------------------------------------------
+// QCameraStream_record createInstance()
+// ---------------------------------------------------------------------------
+namespace android {
+
+
+/* Factory: construct and return a new record (video) stream instance. */
+QCameraStream* QCameraStream_record::createInstance(int cameraId,
+                                                    camera_mode_t mode)
+{
+  LOGV("%s: BEGIN", __func__);
+  QCameraStream* stream = new QCameraStream_record(cameraId, mode);
+  LOGV("%s: END", __func__);
+  return stream;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record deleteInstance()
+// ---------------------------------------------------------------------------
+/* Factory counterpart of createInstance(): release and destroy a record
+ * stream. Safe for NULL.
+ * Cleanup: "ptr = NULL" only cleared the local copy of the pointer (no
+ * caller-visible effect) and has been removed. */
+void QCameraStream_record::deleteInstance(QCameraStream *ptr)
+{
+  LOGV("%s: BEGIN", __func__);
+  if (ptr){
+    ptr->release();
+    delete ptr;
+  }
+  LOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Constructor
+// ---------------------------------------------------------------------------
+/* Constructor: reads the persist.debug.sf.showfps property to decide
+ * whether per-frame FPS logging (debugShowVideoFPS) is enabled.
+ * mHalCamCtrl is injected by the owner after construction. */
+QCameraStream_record::QCameraStream_record(int cameraId,
+                                           camera_mode_t mode)
+  :QCameraStream(cameraId,mode),
+                 mDebugFps(false)
+{
+  mHalCamCtrl = NULL;
+  char value[PROPERTY_VALUE_MAX];
+  LOGV("%s: BEGIN", __func__);
+
+  property_get("persist.debug.sf.showfps", value, "0");
+  mDebugFps = atoi(value);
+
+  LOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Destructor
+// ---------------------------------------------------------------------------
+/* Destructor: stop streaming if still active, release the channel if still
+ * initialized, then clear both state flags. */
+QCameraStream_record::~QCameraStream_record() {
+  LOGV("%s: BEGIN", __func__);
+  if(mActive) {
+    stop();
+  }
+  if(mInit) {
+    release();
+  }
+  mInit = false;
+  mActive = false;
+  LOGV("%s: END", __func__);
+
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Callback from mm_camera
+// ---------------------------------------------------------------------------
+/* mm_camera buffer-notify trampoline: forwards each newly filled video
+ * buffer to the owning QCameraStream_record instance (passed back through
+ * user_data by the registration in init()).
+ * Cleanup: removed the unused local "bufs_used". */
+static void record_notify_cb(mm_camera_ch_data_buf_t *bufs_new,
+                             void *user_data)
+{
+  QCameraStream_record *pme = (QCameraStream_record *)user_data;
+  LOGV("%s: BEGIN", __func__);
+
+  /*
+   * Call Function Process Video Data
+   */
+  pme->processRecordFrame(bufs_new);
+  LOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+/* Initialize the record stream: acquire the native video channel and
+ * register record_notify_cb to receive every video buffer
+ * (MM_CAMERA_REG_BUF_CB_INFINITE). Marks the stream initialized on success.
+ * Returns the initChannel() status (NO_ERROR on success). */
+status_t QCameraStream_record::init()
+{
+  status_t ret = NO_ERROR;
+  LOGV("%s: BEGIN", __func__);
+
+  /*
+  * Acquiring Video Channel
+  */
+  ret = QCameraStream::initChannel (mCameraId, MM_CAMERA_CH_VIDEO_MASK);
+  if (NO_ERROR!=ret) {
+    LOGE("%s ERROR: Can't init native cammera preview ch\n",__func__);
+    return ret;
+  }
+
+  /*
+  * Register the Callback with camera
+  */
+  (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+                                     record_notify_cb,
+                                     MM_CAMERA_REG_BUF_CB_INFINITE,
+                                     0,
+                                     this);
+
+  mInit = true;
+  LOGV("%s: END", __func__);
+  return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+
+/* Start video streaming: set the channel format, allocate the encoder
+ * buffers, register them with the camera daemon, then kick off
+ * MM_CAMERA_OPS_VIDEO. On any failure after allocation begins, the
+ * partially built buffer state is torn down via releaseEncodeBuffer().
+ * Returns NO_ERROR on success, BAD_VALUE on failure. */
+status_t QCameraStream_record::start()
+{
+  status_t ret = NO_ERROR;
+  LOGV("%s: BEGIN", __func__);
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mInit) {
+    LOGE("%s ERROR: Record buffer not registered",__func__);
+    return BAD_VALUE;
+  }
+
+  setFormat(MM_CAMERA_CH_VIDEO_MASK);
+  //mRecordFreeQueueLock.lock();
+  //mRecordFreeQueue.clear();
+  //mRecordFreeQueueLock.unlock();
+  /*
+  * Allocating Encoder Frame Buffers
+  */
+  ret = initEncodeBuffers();
+  if (NO_ERROR!=ret) {
+    LOGE("%s ERROR: Buffer Allocation Failed\n",__func__);
+    goto error;
+  }
+
+  /* Register (map) the record buffers with the camera daemon. */
+  ret = cam_config_prepare_buf(mCameraId, &mRecordBuf);
+  if(ret != MM_CAMERA_OK) {
+    LOGV("%s ERROR: Reg Record buf err=%d\n", __func__, ret);
+    ret = BAD_VALUE;
+    goto error;
+  }else{
+    ret = NO_ERROR;
+  }
+
+  /*
+  * Start Video Streaming
+  */
+  ret = cam_ops_action(mCameraId, TRUE, MM_CAMERA_OPS_VIDEO, 0);
+  if (MM_CAMERA_OK != ret) {
+    LOGE ("%s ERROR: Video streaming start err=%d\n", __func__, ret);
+    ret = BAD_VALUE;
+    goto error;
+  }else{
+    LOGE("%s : Video streaming Started",__func__);
+    ret = NO_ERROR;
+  }
+  mActive = true;
+  LOGV("%s: END", __func__);
+  return ret;
+
+error:
+  releaseEncodeBuffer();
+  LOGV("%s: END", __func__);
+  return ret;
+}
+
+/* Tear down everything initEncodeBuffers() set up: unmap each buffer from
+ * the camera daemon, free the metadata handle/memory, release the
+ * camera_memory_t, close the fd and (under ION) deallocate the ION memory.
+ * This is called both from the start() error path and from stop(), possibly
+ * after a PARTIAL init (e.g. ION allocation failed mid-loop), so every
+ * per-buffer resource is null-checked and cleared, and the heap arrays are
+ * nulled after delete[] so a repeated call cannot double-free. */
+void QCameraStream_record::releaseEncodeBuffer() {
+  for(int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+    if (NO_ERROR !=
+      mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_VIDEO, cnt,
+      mCameraId, CAM_SOCK_MSG_TYPE_FD_UNMAPPING))
+      LOGE("%s: Unmapping Video Data Failed", __func__);
+
+    if (mHalCamCtrl->mStoreMetaDataInFrame &&
+        mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]) {
+      struct encoder_media_buffer_type * packet =
+          (struct encoder_media_buffer_type *)
+          mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+      /* The native handle was created explicitly in initEncodeBuffers();
+       * it must be deleted before the backing memory is released. */
+      native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+      mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->release(
+        mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]);
+      mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] = NULL;
+    }
+    /* mGetMemory() can fail mid-loop during init; guard against NULL. */
+    if (mHalCamCtrl->mRecordingMemory.camera_memory[cnt]) {
+      mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->release(
+        mHalCamCtrl->mRecordingMemory.camera_memory[cnt]);
+      mHalCamCtrl->mRecordingMemory.camera_memory[cnt] = NULL;
+    }
+    if (mHalCamCtrl->mRecordingMemory.fd[cnt] >= 0) {
+      close(mHalCamCtrl->mRecordingMemory.fd[cnt]);
+      mHalCamCtrl->mRecordingMemory.fd[cnt] = -1;
+    }
+
+#ifdef USE_ION
+    mHalCamCtrl->deallocate_ion_memory(&mHalCamCtrl->mRecordingMemory, cnt);
+#endif
+  }
+  memset(&mHalCamCtrl->mRecordingMemory, 0, sizeof(mHalCamCtrl->mRecordingMemory));
+  //mNumRecordFrames = 0;
+  /* delete[] on NULL is safe; clear the pointers to make this idempotent. */
+  delete[] recordframes;
+  recordframes = NULL;
+  if (mRecordBuf.video.video.buf.mp) {
+    delete[] mRecordBuf.video.video.buf.mp;
+    mRecordBuf.video.video.buf.mp = NULL;
+  }
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+/* Stop video streaming: halt MM_CAMERA_OPS_VIDEO, unregister (unmap) the
+ * record buffers from the daemon, then free them. No-op if not active.
+ * NOTE(review): mActive is cleared both before taking the lock and again at
+ * the end -- the second assignment is redundant. */
+void QCameraStream_record::stop()
+{
+  status_t ret = NO_ERROR;
+  LOGV("%s: BEGIN", __func__);
+
+  if(!mActive) {
+    LOGE("%s : Record stream not started",__func__);
+    return;
+  }
+  /* Clear mActive first so in-flight frame callbacks drop out early. */
+  mActive = false;
+  Mutex::Autolock lock(mStopCallbackLock);
+#if 0 //mzhu, when stop recording, all frame will be dirty. no need to queue frame back to kernel any more
+  mRecordFreeQueueLock.lock();
+  while(!mRecordFreeQueue.isEmpty()) {
+    LOGV("%s : Pre-releasing of Encoder buffers!\n", __FUNCTION__);
+    mm_camera_ch_data_buf_t releasedBuf = mRecordFreeQueue.itemAt(0);
+    mRecordFreeQueue.removeAt(0);
+    mRecordFreeQueueLock.unlock();
+    LOGV("%s (%d): releasedBuf.idx = %d\n", __FUNCTION__, __LINE__,
+                                              releasedBuf.video.video.idx);
+    if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId,&releasedBuf))
+      LOGE("%s : Buf Done Failed",__func__);
+  }
+  mRecordFreeQueueLock.unlock();
+#if 0
+  while (!mRecordFreeQueue.isEmpty()) {
+    LOGE("%s : Waiting for Encoder to release all buffer!\n", __FUNCTION__);
+  }
+#endif
+#endif // mzhu
+  /* unregister the notify fn from the mm_camera_t object
+   * call stop() in parent class to stop the monitor thread */
+
+  ret = cam_ops_action(mCameraId, FALSE, MM_CAMERA_OPS_VIDEO, 0);
+  if (MM_CAMERA_OK != ret) {
+    LOGE ("%s ERROR: Video streaming Stop err=%d\n", __func__, ret);
+  }
+
+  ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_VIDEO);
+  if(ret != MM_CAMERA_OK){
+    LOGE("%s ERROR: Ureg video buf \n", __func__);
+  }
+
+  /* Free camera_memory handles, fds and ION allocations. */
+  releaseEncodeBuffer();
+
+  mActive = false;
+  LOGV("%s: END", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+/* Release the record stream: stop it if still active, deinit the video
+ * channel, and clear the buffer-notify registration installed by init().
+ * No-op (after the stop attempt) if the stream was never initialized. */
+void QCameraStream_record::release()
+{
+  status_t ret = NO_ERROR;
+  LOGV("%s: BEGIN", __func__);
+
+  if(mActive) {
+    stop();
+  }
+  if(!mInit) {
+    LOGE("%s : Record stream not initialized",__func__);
+    return;
+  }
+
+  ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_VIDEO);
+  if(ret != MM_CAMERA_OK) {
+    LOGE("%s:Deinit Video channel failed=%d\n", __func__, ret);
+  }
+  /* Clear the callback registered in init() by registering NULL. */
+  (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+                                    NULL,
+                                    (mm_camera_register_buf_cb_type_t)NULL,
+                                    NULL,
+                                    NULL);
+  mInit = false;
+  LOGV("%s: END", __func__);
+}
+
+/* Handle one filled video buffer from the driver: remember it in
+ * mRecordedFrames (so releaseRecordingFrame() can match it later), clean
+ * the ION cache, and deliver it to the encoder/app through the timestamped
+ * data callback -- either the metadata handle (mStoreMetaDataInFrame) or
+ * the raw camera_memory_t. The buffer is returned to the driver only when
+ * the client calls releaseRecordingFrame().
+ * Always returns NO_ERROR (frames after stop are dropped).
+ * NOTE(review): mStopCallbackLock is unlocked manually while
+ * Mutex::Autolock also owns it -- confirm the lock is recursive or that
+ * this pairing is intentional.
+ * NOTE(review): when the callback is NULL or CAMERA_MSG_VIDEO_FRAME is
+ * disabled, the frame is neither delivered nor buf-done'd here -- verify it
+ * cannot be leaked in that case. */
+status_t QCameraStream_record::processRecordFrame(void *data)
+{
+  LOGV("%s : BEGIN",__func__);
+  mm_camera_ch_data_buf_t* frame = (mm_camera_ch_data_buf_t*) data;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mActive) {
+    LOGE("Recording Stopped. Returning callback");
+    return NO_ERROR;
+  }
+
+  if (UNLIKELY(mDebugFps)) {
+    debugShowVideoFPS();
+  }
+
+  mHalCamCtrl->dumpFrameToFile(frame->video.video.frame, HAL_DUMP_FRM_VIDEO);
+  /* Snapshot the timestamped callback pointer under its lock. */
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_timestamp_callback rcb = mHalCamCtrl->mDataCbTimestamp;
+  void *rdata = mHalCamCtrl->mCallbackCookie;
+  mHalCamCtrl->mCallbackLock.unlock();
+
+  /* Driver timestamp (tv_sec/tv_nsec) converted to nanoseconds. */
+  nsecs_t timeStamp = nsecs_t(frame->video.video.frame->ts.tv_sec)*1000000000LL + \
+                      frame->video.video.frame->ts.tv_nsec;
+
+  LOGE("Send Video frame to services/encoder TimeStamp : %lld",timeStamp);
+  /* Keep a copy so releaseRecordingFrame() can buf-done it by index. */
+  mRecordedFrames[frame->video.video.idx] = *frame;
+
+#ifdef USE_ION
+  /* The camera hardware wrote this ION buffer; clean the CPU cache before
+   * the encoder reads it. Failure is logged but not fatal. */
+  struct ion_flush_data cache_inv_data;
+  int ion_fd;
+  ion_fd = frame->video.video.frame->ion_dev_fd;
+  cache_inv_data.vaddr = (void *)frame->video.video.frame->buffer;
+  cache_inv_data.fd = frame->video.video.frame->fd;
+  cache_inv_data.handle = frame->video.video.frame->fd_data.handle;
+  cache_inv_data.length = frame->video.video.frame->ion_alloc.len;
+
+  if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_CACHES) < 0)
+    LOGE("%s: Cache clean for Video buffer %p fd = %d failed", __func__,
+      cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
+  if (mHalCamCtrl->mStoreMetaDataInFrame) {
+    /* Metadata mode: hand the encoder a small handle referencing the fd. */
+    mStopCallbackLock.unlock();
+    if(mActive && (rcb != NULL) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
+      rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+          mHalCamCtrl->mRecordingMemory.metadata_memory[frame->video.video.idx],
+          0, mHalCamCtrl->mCallbackCookie);
+    }
+  } else {
+    /* Raw mode: hand the encoder the actual frame memory. */
+    mStopCallbackLock.unlock();
+    if(mActive && (rcb != NULL) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
+      rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+          mHalCamCtrl->mRecordingMemory.camera_memory[frame->video.video.idx],
+          0, mHalCamCtrl->mCallbackCookie);
+    }
+  }
+
+  LOGV("%s : END",__func__);
+  return NO_ERROR;
+}
+
+//Record Related Functions
+/* Allocate and describe the video (encoder) frame buffers:
+ *  - query the negotiated video dimensions/plane layout from the daemon,
+ *  - allocate buf_cnt frames (fewer in low-power camcorder mode) from ION
+ *    or pmem,
+ *  - wrap each in a camera_memory_t and, when mStoreMetaDataInFrame is set,
+ *    an encoder_media_buffer_type metadata handle
+ *    (kMetadataBufferTypeCameraSource),
+ *  - map each fd to the camera daemon and fill mRecordBuf's multi-plane
+ *    descriptors.
+ * Returns NO_ERROR on success; BAD_VALUE/UNKNOWN_ERROR on failure (the
+ * caller, start(), tears down partial state via releaseEncodeBuffer()).
+ * BUGFIX: the element count passed to new[] for the mp descriptor array was
+ * multiplied by sizeof(mm_camera_mp_buf_t), over-allocating by that factor;
+ * new[] takes an element count, not a byte count.
+ * Cleanup: removed the unused pmem_region/width/height locals and a
+ * duplicated memset of mRecordingMemory. */
+status_t QCameraStream_record::initEncodeBuffers()
+{
+  LOGE("%s : BEGIN",__func__);
+  status_t ret = NO_ERROR;
+  uint32_t frame_len;
+  uint8_t num_planes;
+  uint32_t planes[VIDEO_MAX_PLANES];
+  int buf_cnt;
+
+  /* dim is a member; fetch the current dimension/plane layout. */
+  memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+  ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+  if (MM_CAMERA_OK != ret) {
+    LOGE("%s: ERROR - can't get camera dimension!", __func__);
+    return BAD_VALUE;
+  }
+  /* Two planes (Y + interleaved CbCr); lengths come from the daemon. */
+  num_planes = 2;
+  planes[0] = dim.video_frame_offset.mp[0].len;
+  planes[1] = dim.video_frame_offset.mp[1].len;
+  frame_len = dim.video_frame_offset.frame_len;
+
+  buf_cnt = VIDEO_BUFFER_COUNT;
+  if(mHalCamCtrl->isLowPowerCamcorder()) {
+    LOGE("%s: lower power camcorder selected", __func__);
+    buf_cnt = VIDEO_BUFFER_COUNT_LOW_POWER_CAMCORDER;
+  }
+  recordframes = new msm_frame[buf_cnt];
+  memset(recordframes,0,sizeof(struct msm_frame) * buf_cnt);
+
+  mRecordBuf.video.video.buf.mp = new mm_camera_mp_buf_t[buf_cnt];
+  if (!mRecordBuf.video.video.buf.mp) {
+    LOGE("%s Error allocating memory for mplanar struct ", __func__);
+    return BAD_VALUE;
+  }
+  memset(mRecordBuf.video.video.buf.mp, 0,
+    buf_cnt * sizeof(mm_camera_mp_buf_t));
+
+  /* Reset the recording-memory bookkeeping; fds are marked invalid so the
+   * release path can tell allocated entries from untouched ones. */
+  memset(&mHalCamCtrl->mRecordingMemory, 0, sizeof(mHalCamCtrl->mRecordingMemory));
+  for (int i=0; i<MM_CAMERA_MAX_NUM_FRAMES;i++) {
+    mHalCamCtrl->mRecordingMemory.main_ion_fd[i] = -1;
+    mHalCamCtrl->mRecordingMemory.fd[i] = -1;
+  }
+
+  mHalCamCtrl->mRecordingMemory.buffer_count = buf_cnt;
+
+  mHalCamCtrl->mRecordingMemory.size = frame_len;
+  mHalCamCtrl->mRecordingMemory.cbcr_offset = planes[0];
+
+  for (int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+#ifdef USE_ION
+    if(mHalCamCtrl->allocate_ion_memory(&mHalCamCtrl->mRecordingMemory, cnt,
+                     ((0x1 << CAMERA_ION_HEAP_ID) | (0x1 << CAMERA_ION_FALLBACK_HEAP_ID))) < 0) {
+      LOGE("%s ION alloc failed\n", __func__);
+      return UNKNOWN_ERROR;
+    }
+#else
+    mHalCamCtrl->mRecordingMemory.fd[cnt] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+    if(mHalCamCtrl->mRecordingMemory.fd[cnt] <= 0) {
+      LOGE("%s: no pmem for frame %d", __func__, cnt);
+      return UNKNOWN_ERROR;
+    }
+#endif
+    mHalCamCtrl->mRecordingMemory.camera_memory[cnt] =
+      mHalCamCtrl->mGetMemory(mHalCamCtrl->mRecordingMemory.fd[cnt],
+      mHalCamCtrl->mRecordingMemory.size, 1, (void *)this);
+
+    if (mHalCamCtrl->mStoreMetaDataInFrame) {
+      /* Build the metadata packet the encoder consumes: a native handle
+       * carrying the buffer fd plus offset and size ints. */
+      mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] =
+        mHalCamCtrl->mGetMemory(-1,
+        sizeof(struct encoder_media_buffer_type), 1, (void *)this);
+      struct encoder_media_buffer_type * packet =
+        (struct encoder_media_buffer_type *)
+        mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+      packet->meta_handle = native_handle_create(1, 2); //1 fd, 1 offset and 1 size
+      packet->buffer_type = kMetadataBufferTypeCameraSource;
+      native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+      nh->data[0] = mHalCamCtrl->mRecordingMemory.fd[cnt];
+      nh->data[1] = 0;
+      nh->data[2] = mHalCamCtrl->mRecordingMemory.size;
+    }
+    recordframes[cnt].fd = mHalCamCtrl->mRecordingMemory.fd[cnt];
+    recordframes[cnt].buffer = (uint32_t)mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->data;
+    recordframes[cnt].y_off = 0;
+    recordframes[cnt].cbcr_off = mHalCamCtrl->mRecordingMemory.cbcr_offset;
+    recordframes[cnt].path = OUTPUT_TYPE_V;
+    recordframes[cnt].fd_data = mHalCamCtrl->mRecordingMemory.ion_info_fd[cnt];
+    recordframes[cnt].ion_alloc = mHalCamCtrl->mRecordingMemory.alloc[cnt];
+    recordframes[cnt].ion_dev_fd = mHalCamCtrl->mRecordingMemory.main_ion_fd[cnt];
+
+    /* Share the fd with the camera daemon over the control socket. */
+    if (NO_ERROR !=
+      mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_VIDEO, cnt,
+      recordframes[cnt].fd, mHalCamCtrl->mRecordingMemory.size, mCameraId,
+      CAM_SOCK_MSG_TYPE_FD_MAPPING))
+      LOGE("%s: sending mapping data Msg Failed", __func__);
+
+    LOGE ("initRecord : record heap , video buffers  buffer=%lu fd=%d y_off=%d cbcr_off=%d\n",
+      (unsigned long)recordframes[cnt].buffer, recordframes[cnt].fd, recordframes[cnt].y_off,
+      recordframes[cnt].cbcr_off);
+    //mNumRecordFrames++;
+
+    mRecordBuf.video.video.buf.mp[cnt].frame = recordframes[cnt];
+    mRecordBuf.video.video.buf.mp[cnt].frame_offset = 0;
+    mRecordBuf.video.video.buf.mp[cnt].num_planes = num_planes;
+    /* Plane 0 needs to be set seperately. Set other planes
+     * in a loop. */
+    mRecordBuf.video.video.buf.mp[cnt].planes[0].reserved[0] =
+      mRecordBuf.video.video.buf.mp[cnt].frame_offset;
+    mRecordBuf.video.video.buf.mp[cnt].planes[0].length = planes[0];
+    mRecordBuf.video.video.buf.mp[cnt].planes[0].m.userptr =
+      recordframes[cnt].fd;
+    for (int j = 1; j < num_planes; j++) {
+      mRecordBuf.video.video.buf.mp[cnt].planes[j].length = planes[j];
+      mRecordBuf.video.video.buf.mp[cnt].planes[j].m.userptr =
+        recordframes[cnt].fd;
+      mRecordBuf.video.video.buf.mp[cnt].planes[j].reserved[0] =
+        mRecordBuf.video.video.buf.mp[cnt].planes[j-1].reserved[0] +
+        mRecordBuf.video.video.buf.mp[cnt].planes[j-1].length;
+    }
+  }
+
+  //memset(&mRecordBuf, 0, sizeof(mRecordBuf));
+  mRecordBuf.ch_type = MM_CAMERA_CH_VIDEO;
+  mRecordBuf.video.video.num = mHalCamCtrl->mRecordingMemory.buffer_count;//kRecordBufferCount;
+  //mRecordBuf.video.video.frame_offset = &record_offset[0];
+  //mRecordBuf.video.video.frame = &recordframes[0];
+  LOGE("%s : END",__func__);
+  return NO_ERROR;
+}
+
+/* Called by the client/encoder when it is done with a video frame that was
+ * delivered via the data callback. Matches the opaque data pointer against
+ * the per-buffer metadata (or raw) memory to find the buffer index, then
+ * returns that buffer to the driver with cam_evt_buf_done(). Logs an error
+ * if recording already stopped or if no buffer matches. */
+void QCameraStream_record::releaseRecordingFrame(const void *opaque)
+{
+  LOGV("%s : BEGIN, opaque = 0x%p",__func__, opaque);
+  if(!mActive)
+  {
+    LOGE("%s : Recording already stopped!!! Leak???",__func__);
+    return;
+  }
+  for(int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+    if (mHalCamCtrl->mStoreMetaDataInFrame) {
+      /* Metadata mode: the client saw the metadata packet's data pointer. */
+      if(mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] &&
+              mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data == opaque) {
+        /* found the match */
+        if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mRecordedFrames[cnt]))
+          LOGE("%s : Buf Done Failed",__func__);
+        LOGV("%s : END",__func__);
+        return;
+      }
+    } else {
+      /* Raw mode: the client saw the frame memory's data pointer. */
+      if(mHalCamCtrl->mRecordingMemory.camera_memory[cnt] &&
+              mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->data == opaque) {
+        /* found the match */
+        if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mRecordedFrames[cnt]))
+          LOGE("%s : Buf Done Failed",__func__);
+        LOGV("%s : END",__func__);
+        return;
+      }
+    }
+  }
+  LOGE("%s: cannot find the matched frame with opaue = 0x%p", __func__, opaque);
+}
+
+/* Log the measured video frame rate (debug aid, enabled through the
+ * persist.debug.sf.showfps property). Counts frames and, once at least
+ * 250 ms have elapsed since the last report, logs frames/elapsed-time and
+ * starts a new measurement window. */
+void QCameraStream_record::debugShowVideoFPS() const
+{
+  static int sFrameCount;
+  static int sFramesAtLastLog = 0;
+  static nsecs_t sLastLogTime = 0;
+  static float sFps = 0;
+
+  sFrameCount++;
+  nsecs_t now = systemTime();
+  nsecs_t elapsed = now - sLastLogTime;
+  if (elapsed <= ms2ns(250))
+    return;
+  sFps = ((sFrameCount - sFramesAtLastLog) * float(s2ns(1))) / elapsed;
+  LOGI("Video Frames Per Second: %.4f", sFps);
+  sLastLogTime = now;
+  sFramesAtLastLog = sFrameCount;
+}
+
+#if 0
+sp<IMemoryHeap> QCameraStream_record::getHeap() const
+{
+ return mRecordHeap != NULL ? mRecordHeap->mHeap : NULL;
+}
+
+#endif
+/* Live snapshot is not implemented for this stream; report success.
+ * BUGFIX: previously returned 'true' (1), but for a status_t return the
+ * file-wide success convention is NO_ERROR (0); returning 1 reads as an
+ * error code to callers that compare against NO_ERROR. */
+status_t QCameraStream_record::takeLiveSnapshot(){
+  return NO_ERROR;
+}
+
+}//namespace android
+
diff --git a/camera/QCamera/HAL/core/src/QCameraHWI_Still.cpp b/camera/QCamera/HAL/core/src/QCameraHWI_Still.cpp
new file mode 100644
index 0000000..521acec
--- /dev/null
+++ b/camera/QCamera/HAL/core/src/QCameraHWI_Still.cpp
@@ -0,0 +1,2507 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define LOG_NDEBUG 0
+#define LOG_NDDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Still"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <media/mediarecorder.h>
+#include <math.h>
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+
+#define THUMBNAIL_DEFAULT_WIDTH 512
+#define THUMBNAIL_DEFAULT_HEIGHT 384
+
+/* following code implement the still image capture & encoding logic of this class*/
+namespace android {
+
+/* Snapshot stream state machine. The deinit paths below compare against
+ * these with >=, so the declaration order encodes capture progress:
+ * channel acquisition -> buffer setup -> capture -> JPEG completion. */
+typedef enum {
+ SNAPSHOT_STATE_ERROR,
+ SNAPSHOT_STATE_UNINIT, /* nothing acquired or allocated */
+ SNAPSHOT_STATE_CH_ACQUIRED, /* capture channel acquired */
+ SNAPSHOT_STATE_BUF_NOTIF_REGD, /* buffer-notify callback registered */
+ SNAPSHOT_STATE_BUF_INITIALIZED, /* stream buffers allocated/registered */
+ SNAPSHOT_STATE_INITIALIZED, /* stream fully configured */
+ SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD, /* capture started */
+ SNAPSHOT_STATE_YUV_RECVD, /* raw YUV frame received */
+ SNAPSHOT_STATE_JPEG_ENCODING, /* frame handed to the JPEG encoder */
+ SNAPSHOT_STATE_JPEG_ENCODE_DONE, /* one JPEG finished */
+ SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE, /* all requested JPEGs finished */
+
+ /*Add any new state above*/
+ SNAPSHOT_STATE_MAX
+} snapshot_state_type_t;
+
+
+//-----------------------------------------------------------------------
+// Constants
+//----------------------------------------------------------------------
+static const int PICTURE_FORMAT_JPEG = 1; /* mSnapshotFormat: JPEG capture */
+static const int PICTURE_FORMAT_RAW = 2; /* mSnapshotFormat: raw (bayer) capture */
+static const int POSTVIEW_SMALL_HEIGHT = 144;
+
+// ---------------------------------------------------------------------------
+/* static functions*/
+// ---------------------------------------------------------------------------
+
+
+
+/* TBD: Temp: to be removed*/
+/* Ad-hoc completion signal for the raw-snapshot thread; see
+ * mm_app_snapshot_done()/mm_app_snapshot_wait() below.
+ * NOTE(review): mutex/condvar are never pthread_*_init'd — relies on
+ * zero-initialized statics being equivalent to the static initializers;
+ * confirm this holds on the target libc (bionic). */
+static pthread_mutex_t g_s_mutex;
+static int g_status = 0;
+static pthread_cond_t g_s_cond_v;
+
+/* Signal mm_app_snapshot_wait() that the (raw) snapshot has completed:
+ * sets g_status under the mutex and wakes one waiter. */
+static void mm_app_snapshot_done()
+{
+ pthread_mutex_lock(&g_s_mutex);
+ g_status = TRUE;
+ pthread_cond_signal(&g_s_cond_v);
+ pthread_mutex_unlock(&g_s_mutex);
+}
+
+/* Block until mm_app_snapshot_done() signals completion, then consume
+ * the signal (reset g_status) for the next capture.
+ * BUGFIX: the predicate is now re-checked in a while loop — POSIX allows
+ * pthread_cond_wait to wake spuriously, so a plain `if` could return
+ * before the snapshot is actually done. The g_status reset is also moved
+ * under the mutex so it cannot race a concurrent signaller. */
+static void mm_app_snapshot_wait()
+{
+ pthread_mutex_lock(&g_s_mutex);
+ while(FALSE == g_status) pthread_cond_wait(&g_s_cond_v, &g_s_mutex);
+ g_status = FALSE;
+ pthread_mutex_unlock(&g_s_mutex);
+}
+
+/* Debug helper: dump a raw snapshot frame buffer to a file.
+ * @return 0 on success, -1 if the file cannot be opened or fully written.
+ * BUGFIX: the write() result is now checked (short/failed writes used to
+ * be silently reported as success); unused local buffer removed.
+ * NOTE(review): mode 0777 makes the dump world-writable — confirm intended
+ * for debug builds. */
+static int mm_app_dump_snapshot_frame(char *filename,
+ const void *buffer,
+ uint32_t len)
+{
+ int file_fdp;
+ int rc = 0;
+
+ file_fdp = open(filename, O_RDWR | O_CREAT, 0777);
+
+ if (file_fdp < 0) {
+ rc = -1;
+ goto end;
+ }
+ if (write(file_fdp,
+ (const void *)buffer, len) != (ssize_t)len) {
+ rc = -1;
+ }
+ close(file_fdp);
+end:
+ return rc;
+}
+
+/* Callback received when a frame is available after snapshot*/
+/* Registered with mm-camera; trampolines the received channel buffer to
+ * the owning QCameraStream_Snapshot (passed as user_data). */
+static void snapshot_notify_cb(mm_camera_ch_data_buf_t *recvd_frame,
+ void *user_data)
+{
+ QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+
+ LOGD("%s: E", __func__);
+
+ if (pme != NULL) {
+ pme->receiveRawPicture(recvd_frame);
+ }
+ else{
+ LOGW("%s: Snapshot obj NULL in callback", __func__);
+ }
+
+ LOGD("%s: X", __func__);
+
+}
+
+/* Once we give frame for encoding, we get encoded jpeg image
+ fragments by fragment. We'll need to store them in a buffer
+ to form complete JPEG image */
+/* JPEG encoder fragment callback: the encoder emits the bitstream in
+ * pieces; each piece is appended to the output buffer by
+ * QCameraStream_Snapshot::receiveJpegFragment(). */
+static void snapshot_jpeg_fragment_cb(uint8_t *ptr,
+ uint32_t size,
+ void *user_data)
+{
+ QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+
+ LOGE("%s: E",__func__);
+ if (pme != NULL) {
+ pme->receiveJpegFragment(ptr,size);
+ }
+ else
+ LOGW("%s: Receive jpeg fragment cb obj Null", __func__);
+
+ LOGD("%s: X",__func__);
+}
+
+/* This callback is received once the complete JPEG encoding is done */
+static void snapshot_jpeg_cb(jpeg_event_t event, void *user_data)
+{
+ QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+ LOGE("%s: E ",__func__);
+
+ if (event != JPEG_EVENT_DONE) {
+ if (event == JPEG_EVENT_THUMBNAIL_DROPPED) {
+ LOGE("%s: Error in thumbnail encoding (event: %d)!!!",
+ __func__, event);
+ LOGD("%s: X",__func__);
+ return;
+ }
+ else {
+ LOGE("%s: Error (event: %d) while jpeg encoding!!!",
+ __func__, event);
+ }
+ }
+
+ if (pme != NULL) {
+ pme->receiveCompleteJpegPicture(event);
+ LOGE(" Completed issuing JPEG callback");
+ /* deinit only if we are done taking requested number of snapshots */
+ if (pme->getSnapshotState() == SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE) {
+ LOGE(" About to issue deinit callback");
+ /* If it's ZSL Mode, we don't deinit now. We'll stop the polling thread and
+ deinit the channel/buffers only when we change the mode from zsl to
+ non-zsl. */
+ if (!(pme->isZSLMode())) {
+ pme->stop();
+ }
+ }
+ }
+ else
+ LOGW("%s: Receive jpeg cb Obj Null", __func__);
+
+
+ LOGD("%s: X",__func__);
+
+}
+
+// ---------------------------------------------------------------------------
+/* private functions*/
+// ---------------------------------------------------------------------------
+
+/* Append one JPEG bitstream fragment (from snapshot_jpeg_fragment_cb)
+ * into the JPEG output buffer mJpegMemory at the current write offset.
+ * Fragments accumulate in mJpegOffset until snapshot_jpeg_cb reports the
+ * encode is complete.
+ * BUGFIX: the destination address was computed by casting the data
+ * pointer to uint32_t, which truncates addresses on 64-bit builds; plain
+ * byte-pointer arithmetic is used instead. Dead #if 0 branch and the
+ * duplicated commented-out memcpy removed. */
+void QCameraStream_Snapshot::
+receiveJpegFragment(uint8_t *ptr, uint32_t size)
+{
+ LOGE("%s: E", __func__);
+ if(mHalCamCtrl->mJpegMemory.camera_memory[0] != NULL && ptr != NULL && size > 0) {
+ memcpy((uint8_t *)mHalCamCtrl->mJpegMemory.camera_memory[0]->data + mJpegOffset, ptr, size);
+ mJpegOffset += size;
+ } else {
+ LOGE("%s: mJpegHeap is NULL!", __func__);
+ }
+
+ LOGD("%s: X", __func__);
+}
+
+
+/* JPEG completion path: deliver the assembled JPEG (mJpegMemory, length
+ * mJpegOffset) to the app via the CAMERA_MSG_COMPRESSED_IMAGE data
+ * callback, release per-shot resources, and either kick off the next
+ * queued encode or finalize the burst.
+ * @param event encoder result from snapshot_jpeg_cb; on any failure path
+ *              the app is notified with a NULL payload so it can return
+ *              to preview.
+ * BUGFIX: a failed mGetMemory() used to be logged and then dereferenced
+ * anyway (memcpy/release on NULL); it now takes the failure-callback
+ * path. The unused outer `encodedMem` local (shadowed below) is removed. */
+void QCameraStream_Snapshot::
+receiveCompleteJpegPicture(jpeg_event_t event)
+{
+ int msg_type = CAMERA_MSG_COMPRESSED_IMAGE;
+ LOGE("%s: E", __func__);
+ camera_data_callback jpg_data_cb = NULL;
+ bool fail_cb_flag = false;
+
+ //Mutex::Autolock l(&snapshotLock);
+ mStopCallbackLock.lock( );
+ if(!mActive && !isLiveSnapshot()) {
+ LOGE("%s : Cancel Picture",__func__);
+ fail_cb_flag = true;
+ goto end;
+ }
+
+ if(mCurrentFrameEncoded!=NULL /*&& !isLiveSnapshot()*/){
+ LOGV("<DEBUG>: Calling buf done for snapshot buffer");
+ cam_evt_buf_done(mCameraId, mCurrentFrameEncoded);
+ }
+ mHalCamCtrl->dumpFrameToFile(mHalCamCtrl->mJpegMemory.camera_memory[0]->data, mJpegOffset, (char *)"debug", (char *)"jpg", 0);
+
+end:
+ msg_type = CAMERA_MSG_COMPRESSED_IMAGE;
+ if (mHalCamCtrl->mDataCb && (mHalCamCtrl->mMsgEnabled & msg_type)) {
+ jpg_data_cb = mHalCamCtrl->mDataCb;
+ }else{
+ LOGE("%s: JPEG callback was cancelled--not delivering image.", __func__);
+ }
+ setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODE_DONE);
+ mNumOfRecievedJPEG++;
+ mHalCamCtrl->deinitExifData();
+
+ /* free the resource we allocated to maintain the structure */
+ if(mCurrentFrameEncoded) {
+ free(mCurrentFrameEncoded);
+ mCurrentFrameEncoded = NULL;
+ }
+
+ /* Before leaving check the jpeg queue. If it's not empty give the available
+ frame for encoding*/
+ if (!mSnapshotQueue.isEmpty()) {
+ LOGI("%s: JPEG Queue not empty. Dequeue and encode.", __func__);
+ mm_camera_ch_data_buf_t* buf =
+ (mm_camera_ch_data_buf_t *)mSnapshotQueue.dequeue();
+ if ( NO_ERROR != encodeDisplayAndSave(buf, 1)){
+ fail_cb_flag = true;
+ }
+ } else if (mNumOfSnapshot == mNumOfRecievedJPEG ) { /* finished */
+ LOGD("%s: Before omxJpegFinish", __func__);
+ omxJpegFinish();
+ LOGD("%s: After omxJpegFinish", __func__);
+ LOGD("%s: Complete JPEG Encoding Done!", __func__);
+ setSnapshotState(SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE);
+ mBurstModeFlag = false;
+ mSnapshotQueue.flush();
+ mNumOfRecievedJPEG = 0;
+ /* in case of zsl, we need to reset some of the zsl attributes */
+ if (isZSLMode()){
+ LOGD("%s: Resetting the ZSL attributes", __func__);
+ setZSLChannelAttribute();
+ }
+ if (!isZSLMode() && !isLiveSnapshot()){
+ //Stop polling before calling datacb for if not ZSL mode
+ stopPolling();
+ }
+ } else {
+ LOGD("%s: mNumOfRecievedJPEG(%d), mNumOfSnapshot(%d)", __func__, mNumOfRecievedJPEG, mNumOfSnapshot);
+ }
+ if(fail_cb_flag && mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ /* get picture failed. Give jpeg callback with NULL data
+ * to the application to restore to preview mode
+ */
+ jpg_data_cb = mHalCamCtrl->mDataCb;
+ }
+ if(!fail_cb_flag) {
+ camera_memory_t *encodedMem = mHalCamCtrl->mGetMemory(
+ mHalCamCtrl->mJpegMemory.fd[0], mJpegOffset, 1, mHalCamCtrl);
+ if (!encodedMem || !encodedMem->data) {
+ /* Allocation failed: notify the app with a NULL payload instead of
+ * dereferencing the bad pointer. */
+ LOGE("%s: mGetMemory failed.\n", __func__);
+ mStopCallbackLock.unlock( );
+ if ((mActive || isLiveSnapshot()) && jpg_data_cb != NULL) {
+ jpg_data_cb (CAMERA_MSG_COMPRESSED_IMAGE, NULL, 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+ if (encodedMem)
+ encodedMem->release( encodedMem );
+ } else {
+ memcpy(encodedMem->data, mHalCamCtrl->mJpegMemory.camera_memory[0]->data, mJpegOffset );
+ mStopCallbackLock.unlock( );
+ if ((mActive || isLiveSnapshot()) && jpg_data_cb != NULL) {
+ LOGV("%s: Calling upperlayer callback to store JPEG image", __func__);
+ jpg_data_cb (msg_type,encodedMem, 0, NULL,mHalCamCtrl->mCallbackCookie);
+ }
+ encodedMem->release( encodedMem );
+ }
+ jpg_data_cb = NULL;
+ }else{
+ LOGV("Image Encoding Failed... Notify Upper layer");
+ mStopCallbackLock.unlock( );
+ if((mActive || isLiveSnapshot()) && jpg_data_cb != NULL) {
+ jpg_data_cb (CAMERA_MSG_COMPRESSED_IMAGE,NULL, 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ //reset jpeg_offset
+ mJpegOffset = 0;
+ LOGD("%s: X", __func__);
+}
+
+/* Reconcile the user-requested picture/thumbnail sizes with the current
+ * mm-camera dimension config; pushes MM_CAMERA_PARM_DIMENSION only when
+ * something changed, then reads back the driver-corrected values into the
+ * member size/format fields. Order of operations matters here: the
+ * corrected values must be captured after the set_parm call. */
+status_t QCameraStream_Snapshot::
+configSnapshotDimension(cam_ctrl_dimension_t* dim)
+{
+ bool matching = true;
+ cam_format_t img_format;
+ status_t ret = NO_ERROR;
+ LOGD("%s: E", __func__);
+
+ LOGI("%s:Passed picture size: %d X %d", __func__,
+ dim->picture_width, dim->picture_height);
+ LOGI("%s:Passed postview size: %d X %d", __func__,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ /* First check if the picture resolution is the same, if not, change it*/
+ mHalCamCtrl->getPictureSize(&mPictureWidth, &mPictureHeight);
+ LOGD("%s: Picture size received: %d x %d", __func__,
+ mPictureWidth, mPictureHeight);
+ /*Current VFE software design requires picture size >= display size for ZSL*/
+ if (isZSLMode()){
+ mPostviewWidth = dim->display_width;
+ mPostviewHeight = dim->display_height;
+ } else {
+ mPostviewWidth = mHalCamCtrl->mParameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ mPostviewHeight = mHalCamCtrl->mParameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ }
+ /*If application requested thumbnail size to be (0,0)
+ then configure second outout to a default size.
+ Jpeg encoder will drop thumbnail as reflected in encodeParams.
+ */
+ mDropThumbnail = false;
+ if (mPostviewWidth == 0 && mPostviewHeight == 0) {
+ mPostviewWidth = THUMBNAIL_DEFAULT_WIDTH;
+ mPostviewHeight = THUMBNAIL_DEFAULT_HEIGHT;
+ mDropThumbnail = true;
+ }
+
+ LOGD("%s: Postview size received: %d x %d", __func__,
+ mPostviewWidth, mPostviewHeight);
+
+ matching = (mPictureWidth == dim->picture_width) &&
+ (mPictureHeight == dim->picture_height);
+ matching &= (dim->ui_thumbnail_width == mPostviewWidth) &&
+ (dim->ui_thumbnail_height == mPostviewHeight);
+
+ /* picture size currently set do not match with the one wanted
+ by user.*/
+ if (!matching) {
+ /* VFE cannot produce a postview larger than the main image; swap in
+ the postview size and remember to downscale at JPEG time. */
+ if (mPictureWidth < mPostviewWidth || mPictureHeight < mPostviewHeight) {
+ //Changes to Handle VFE limitation.
+ mActualPictureWidth = mPictureWidth;
+ mActualPictureHeight = mPictureHeight;
+ mPictureWidth = mPostviewWidth;
+ mPictureHeight = mPostviewHeight;
+ mJpegDownscaling = TRUE;
+ }else{
+ mJpegDownscaling = FALSE;
+ }
+ dim->picture_width = mPictureWidth;
+ dim->picture_height = mPictureHeight;
+ dim->ui_thumbnail_height = mThumbnailHeight = mPostviewHeight;
+ dim->ui_thumbnail_width = mThumbnailWidth = mPostviewWidth;
+ }
+ #if 0
+ img_format = mHalCamCtrl->getPreviewFormat();
+ if (img_format) {
+ matching &= (img_format == dim->main_img_format);
+ if (!matching) {
+ dim->main_img_format = img_format;
+ dim->thumb_format = img_format;
+ }
+ }
+ #endif
+ if (!matching) {
+ LOGD("%s: Image Sizes before set parm call: main: %dx%d thumbnail: %dx%d",
+ __func__,
+ dim->picture_width, dim->picture_height,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,dim);
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't config snapshot parms!", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ }
+ /* set_parm will return corrected dimension based on aspect ratio and
+ ceiling size */
+ mPictureWidth = dim->picture_width;
+ mPictureHeight = dim->picture_height;
+ mPostviewHeight = mThumbnailHeight = dim->ui_thumbnail_height;
+ mPostviewWidth = mThumbnailWidth = dim->ui_thumbnail_width;
+ mPictureFormat= dim->main_img_format;
+ mThumbnailFormat = dim->thumb_format;
+
+ LOGD("%s: Image Format: %d", __func__, dim->main_img_format);
+ LOGI("%s: Image Sizes: main: %dx%d thumbnail: %dx%d", __func__,
+ dim->picture_width, dim->picture_height,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+end:
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Configure the RAW capture channel: sets the raw-streaming mode channel
+ * attribute and the bayer image format/dimensions on MM_CAMERA_CH_RAW.
+ * @param num_of_snapshots currently only single capture is supported.
+ * @return NO_ERROR or FAILED_TRANSACTION (handleError() invoked on failure). */
+status_t QCameraStream_Snapshot::
+initRawSnapshotChannel(cam_ctrl_dimension_t *dim,
+ int num_of_snapshots)
+{
+ status_t ret = NO_ERROR;
+ mm_camera_ch_image_fmt_parm_t fmt;
+ mm_camera_channel_attr_t ch_attr;
+
+ mm_camera_raw_streaming_type_t raw_stream_type =
+ MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE;
+
+ LOGD("%s: E", __func__);
+
+ /* Initialize stream - set format, acquire channel */
+ /*TBD: Currently we only support single raw capture*/
+ LOGE("num_of_snapshots = %d",num_of_snapshots);
+ if (num_of_snapshots == 1) {
+ raw_stream_type = MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE;
+ }
+
+ /* Set channel attribute */
+ LOGD("%s: Set Raw Snapshot Channel attribute", __func__);
+ memset(&ch_attr, 0, sizeof(ch_attr));
+ ch_attr.type = MM_CAMERA_CH_ATTR_RAW_STREAMING_TYPE;
+ ch_attr.raw_streaming_mode = raw_stream_type;
+
+ if( NO_ERROR !=
+ cam_ops_ch_set_attr(mCameraId, MM_CAMERA_CH_RAW, &ch_attr)) {
+ LOGD("%s: Failure setting Raw channel attribute.", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_RAW;
+ fmt.def.fmt = CAMERA_BAYER_SBGGR10;
+ fmt.def.dim.width = dim->raw_picture_width;
+ fmt.def.dim.height = dim->raw_picture_height;
+
+
+ LOGV("%s: Raw snapshot channel fmt: %d", __func__,
+ fmt.def.fmt);
+ LOGV("%s: Raw snapshot resolution: %dX%d", __func__,
+ dim->raw_picture_width, dim->raw_picture_height);
+
+ LOGD("%s: Set Raw Snapshot channel image format", __func__);
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ if (NO_ERROR != ret) {
+ LOGE("%s: Set Raw Snapshot Channel format err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ LOGE("%s: X", __func__);
+ return ret;
+
+}
+
+/* Program the ZSL frame-buffering attributes (look-back count, queue
+ * watermark, burst interval from the HAL controller) on the snapshot
+ * channel. Called both at init and when resetting ZSL state after a
+ * completed burst. */
+status_t QCameraStream_Snapshot::
+setZSLChannelAttribute(void)
+{
+ status_t ret = NO_ERROR;
+ mm_camera_channel_attr_t ch_attr;
+ LOGD("%s: E", __func__);
+
+ memset(&ch_attr, 0, sizeof(mm_camera_channel_attr_t));
+ ch_attr.type = MM_CAMERA_CH_ATTR_BUFFERING_FRAME;
+ ch_attr.buffering_frame.look_back = mHalCamCtrl->getZSLBackLookCount();
+ ch_attr.buffering_frame.water_mark = mHalCamCtrl->getZSLQueueDepth();
+ ch_attr.buffering_frame.interval = mHalCamCtrl->getZSLBurstInterval( );
+ LOGE("%s: ZSL queue_depth = %d, back_look_count = %d", __func__,
+ ch_attr.buffering_frame.water_mark,
+ ch_attr.buffering_frame.look_back);
+ if( NO_ERROR !=
+ cam_ops_ch_set_attr(mCameraId, MM_CAMERA_CH_SNAPSHOT, &ch_attr)) {
+ LOGD("%s: Failure setting ZSL channel attribute.", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+end:
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Set the main-image and thumbnail format/dimensions on the snapshot
+ * channel; in ZSL mode the buffering attributes are programmed first via
+ * setZSLChannelAttribute(). handleError() runs on any failure. */
+status_t QCameraStream_Snapshot::
+initSnapshotFormat(cam_ctrl_dimension_t *dim)
+{
+ status_t ret = NO_ERROR;
+ mm_camera_ch_image_fmt_parm_t fmt;
+
+ LOGD("%s: E", __func__);
+
+ /* For ZSL mode we'll need to set channel attribute */
+ if (isZSLMode()) {
+ ret = setZSLChannelAttribute();
+ if (ret != NO_ERROR) {
+ goto end;
+ }
+ }
+
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_SNAPSHOT;
+ fmt.snapshot.main.fmt = dim->main_img_format;
+ fmt.snapshot.main.dim.width = dim->picture_width;
+ fmt.snapshot.main.dim.height = dim->picture_height;
+
+ fmt.snapshot.thumbnail.fmt = dim->thumb_format;
+ fmt.snapshot.thumbnail.dim.width = dim->ui_thumbnail_width;
+ fmt.snapshot.thumbnail.dim.height = dim->ui_thumbnail_height;
+
+ LOGV("%s: Snapshot channel fmt = main: %d thumbnail: %d", __func__,
+ dim->main_img_format, dim->thumb_format);
+ LOGV("%s: Snapshot channel resolution = main: %dX%d thumbnail: %dX%d",
+ __func__, dim->picture_width, dim->picture_height,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ LOGD("%s: Set Snapshot channel image format", __func__);
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ if (NO_ERROR != ret) {
+ LOGE("%s: Set Snapshot Channel format err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ LOGE("%s: X", __func__);
+ return ret;
+
+}
+
+/* Tear down a snapshot channel: unregisters the buffer-notify callback
+ * and releases the channel, gated on the state machine so each step is
+ * only undone if it was actually performed. */
+void QCameraStream_Snapshot::
+deinitSnapshotChannel(mm_camera_channel_type_t ch_type)
+{
+ LOGD("%s: E", __func__);
+
+ /* unreg buf notify*/
+ if (getSnapshotState() >= SNAPSHOT_STATE_BUF_NOTIF_REGD){
+ /* Passing NULL callbacks unregisters the previous registration. */
+ if (NO_ERROR != cam_evt_register_buf_notify(mCameraId,
+ ch_type, NULL,(mm_camera_register_buf_cb_type_t)NULL,NULL, this)) {
+ LOGE("%s: Failure to unregister buf notification", __func__);
+ }
+ }
+
+ if (getSnapshotState() >= SNAPSHOT_STATE_CH_ACQUIRED) {
+ LOGD("%s: Release snapshot channel", __func__);
+ cam_ops_ch_release(mCameraId, ch_type);
+ }
+
+ LOGD("%s: X",__func__);
+}
+
+/* Allocate and register the bayer capture buffers for the RAW channel.
+ * Computes the frame length for CAMERA_BAYER_SBGGR10 at the sensor's raw
+ * dimensions, allocates heap memory via the HAL controller, and prepares
+ * the buffers with mm-camera. On success the state machine advances to
+ * SNAPSHOT_STATE_BUF_INITIALIZED.
+ * FIX: restored mis-encoded `&reg_buf` tokens (were corrupted to a
+ * registered-sign character) and removed the unused `frame` local. */
+status_t QCameraStream_Snapshot::
+initRawSnapshotBuffers(cam_ctrl_dimension_t *dim, int num_of_buf)
+{
+ status_t ret = NO_ERROR;
+ uint32_t frame_len;
+ uint8_t num_planes;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ mm_camera_reg_buf_t reg_buf;
+
+ LOGD("%s: E", __func__);
+ memset(&reg_buf, 0, sizeof(mm_camera_reg_buf_t));
+ memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+
+ if ((num_of_buf == 0) || (num_of_buf > MM_CAMERA_MAX_NUM_FRAMES)) {
+ LOGE("%s: Invalid number of buffers (=%d) requested!", __func__, num_of_buf);
+ ret = BAD_VALUE;
+ goto end;
+ }
+
+ reg_buf.def.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+ if (!reg_buf.def.buf.mp) {
+ LOGE("%s Error allocating memory for mplanar struct ", __func__);
+ ret = NO_MEMORY;
+ goto end;
+ }
+ memset(reg_buf.def.buf.mp, 0, num_of_buf * sizeof(mm_camera_mp_buf_t));
+
+ /* Get a frame len for buffer to be allocated*/
+ frame_len = mm_camera_get_msm_frame_len(CAMERA_BAYER_SBGGR10,
+ myMode,
+ dim->raw_picture_width,
+ dim->raw_picture_height,
+ OUTPUT_TYPE_S,
+ &num_planes, planes);
+
+ if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mRawMemory, num_of_buf,
+ frame_len, 0, planes[0], MSM_PMEM_RAW_MAINIMG,
+ &mSnapshotStreamBuf, &reg_buf.def,
+ num_planes, planes) < 0) {
+ ret = NO_MEMORY;
+ goto end;
+ }
+
+ /* register the streaming buffers for the channel*/
+ reg_buf.ch_type = MM_CAMERA_CH_RAW;
+ reg_buf.def.num = mSnapshotStreamBuf.num;
+
+ ret = cam_config_prepare_buf(mCameraId, &reg_buf);
+ if(ret != NO_ERROR) {
+ LOGV("%s:reg snapshot buf err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ /* roll back the heap allocation on registration failure */
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mRawMemory);
+ goto end;
+ }
+
+ /* If we have reached here successfully, we have allocated buffer.
+ Set state machine.*/
+ setSnapshotState(SNAPSHOT_STATE_BUF_INITIALIZED);
+
+end:
+ /* If it's error, we'll need to do some needful */
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ if (reg_buf.def.buf.mp)
+ delete []reg_buf.def.buf.mp;
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Unregister and free the RAW capture buffers; only runs if the state
+ * machine shows buffers were actually allocated. */
+status_t QCameraStream_Snapshot::deinitRawSnapshotBuffers(void)
+{
+ int ret = NO_ERROR;
+
+ LOGD("%s: E", __func__);
+
+ /* deinit buffers only if we have already allocated */
+ if (getSnapshotState() >= SNAPSHOT_STATE_BUF_INITIALIZED ){
+
+ LOGD("%s: Unpreparing Snapshot Buffer", __func__);
+ ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_RAW);
+ if(ret != NO_ERROR) {
+ LOGE("%s:Unreg Raw snapshot buf err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mRawMemory);
+ }
+
+end:
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Allocate and register the JPEG output buffer plus (except for live
+ * snapshot) the main-image and thumbnail capture buffers for the
+ * snapshot channel, using the frame offsets reported by the driver in
+ * `dim`. JPEG rotation is applied to the dimension config first because
+ * the buffer offsets depend on it. On success the state machine advances
+ * to SNAPSHOT_STATE_BUF_INITIALIZED; every failure path releases the
+ * heaps already acquired.
+ * FIX: restored mis-encoded `&reg_buf` tokens (were corrupted to a
+ * registered-sign character) and removed the unused `frame` local. */
+status_t QCameraStream_Snapshot::
+initSnapshotBuffers(cam_ctrl_dimension_t *dim, int num_of_buf)
+{
+ status_t ret = NO_ERROR;
+ uint32_t frame_len, y_off, cbcr_off;
+ uint8_t num_planes;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ mm_camera_reg_buf_t reg_buf;
+ int rotation = 0;
+
+ LOGD("%s: E", __func__);
+ memset(&reg_buf, 0, sizeof(mm_camera_reg_buf_t));
+ memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+
+ if ((num_of_buf == 0) || (num_of_buf > MM_CAMERA_MAX_NUM_FRAMES)) {
+ LOGE("%s: Invalid number of buffers (=%d) requested!",
+ __func__, num_of_buf);
+ ret = BAD_VALUE;
+ goto end;
+ }
+
+ LOGD("%s: Mode: %d Num_of_buf: %d ImageSizes: main: %dx%d thumb: %dx%d",
+ __func__, myMode, num_of_buf,
+ dim->picture_width, dim->picture_height,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ reg_buf.snapshot.main.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+ if (!reg_buf.snapshot.main.buf.mp) {
+ LOGE("%s Error allocating memory for mplanar struct ", __func__);
+ ret = NO_MEMORY;
+ goto end;
+ }
+ memset(reg_buf.snapshot.main.buf.mp, 0,
+ num_of_buf * sizeof(mm_camera_mp_buf_t));
+ if (!isFullSizeLiveshot()) {
+ reg_buf.snapshot.thumbnail.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+ if (!reg_buf.snapshot.thumbnail.buf.mp) {
+ LOGE("%s Error allocating memory for mplanar struct ", __func__);
+ ret = NO_MEMORY;
+ goto end;
+ }
+ memset(reg_buf.snapshot.thumbnail.buf.mp, 0,
+ num_of_buf * sizeof(mm_camera_mp_buf_t));
+ }
+ /* Number of buffers to be set*/
+ /* Set the JPEG Rotation here since get_buffer_offset needs
+ * the value of rotation.*/
+ mHalCamCtrl->setJpegRotation(isZSLMode());
+ if(!isZSLMode())
+ rotation = mHalCamCtrl->getJpegRotation();
+ else
+ rotation = 0;
+ if(rotation != dim->rotation) {
+ dim->rotation = rotation;
+ ret = cam_config_set_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, dim);
+ }
+
+ if(isLiveSnapshot()) {
+ ret = cam_config_set_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, dim);
+ }
+ num_planes = 2;
+ planes[0] = dim->picture_frame_offset.mp[0].len;
+ planes[1] = dim->picture_frame_offset.mp[1].len;
+ frame_len = dim->picture_frame_offset.frame_len;
+ y_off = dim->picture_frame_offset.mp[0].offset;
+ cbcr_off = dim->picture_frame_offset.mp[1].offset;
+ LOGE("%s: main image: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+ __func__, dim->rotation, y_off, cbcr_off, frame_len, dim->picture_width, dim->picture_height);
+ if (mHalCamCtrl->initHeapMem (&mHalCamCtrl->mJpegMemory, 1, frame_len, 0, cbcr_off,
+ MSM_PMEM_MAX, NULL, NULL, num_planes, planes) < 0) {
+ LOGE("%s: Error allocating JPEG memory", __func__);
+ ret = NO_MEMORY;
+ goto end;
+ }
+ if(!isLiveSnapshot()) {
+ if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mSnapshotMemory, num_of_buf,
+ frame_len, y_off, cbcr_off, MSM_PMEM_MAINIMG, &mSnapshotStreamBuf,
+ &reg_buf.snapshot.main, num_planes, planes) < 0) {
+ ret = NO_MEMORY;
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+ goto end;
+ };
+ num_planes = 2;
+ planes[0] = dim->thumb_frame_offset.mp[0].len;
+ planes[1] = dim->thumb_frame_offset.mp[1].len;
+ frame_len = planes[0] + planes[1];
+ if (!isFullSizeLiveshot()) {
+ y_off = dim->thumb_frame_offset.mp[0].offset;
+ cbcr_off = dim->thumb_frame_offset.mp[1].offset;
+ LOGE("%s: thumbnail: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+ __func__, dim->rotation, y_off, cbcr_off, frame_len,
+ dim->thumbnail_width, dim->thumbnail_height);
+
+ if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mThumbnailMemory, num_of_buf,
+ frame_len, y_off, cbcr_off, MSM_PMEM_THUMBNAIL, &mPostviewStreamBuf,
+ &reg_buf.snapshot.thumbnail, num_planes, planes) < 0) {
+ ret = NO_MEMORY;
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+ goto end;
+ }
+ }
+ /* register the streaming buffers for the channel*/
+ reg_buf.ch_type = MM_CAMERA_CH_SNAPSHOT;
+ reg_buf.snapshot.main.num = mSnapshotStreamBuf.num;
+
+ if (!isFullSizeLiveshot())
+ reg_buf.snapshot.thumbnail.num = mPostviewStreamBuf.num;
+ else
+ reg_buf.snapshot.thumbnail.num = 0;
+
+ ret = cam_config_prepare_buf(mCameraId, &reg_buf);
+ if(ret != NO_ERROR) {
+ LOGV("%s:reg snapshot buf err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ if (!isFullSizeLiveshot()){
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mThumbnailMemory);
+ }
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+ goto end;
+ }
+ }
+
+ /* If we have reached here successfully, we have allocated buffer.
+ Set state machine.*/
+ setSnapshotState(SNAPSHOT_STATE_BUF_INITIALIZED);
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ /* reg_buf was zeroed up front, so these deletes are safe on every path */
+ if (reg_buf.snapshot.main.buf.mp)
+ delete []reg_buf.snapshot.main.buf.mp;
+ if (reg_buf.snapshot.thumbnail.buf.mp)
+ delete []reg_buf.snapshot.thumbnail.buf.mp;
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Unregister and free the snapshot-channel buffers allocated by
+ * initSnapshotBuffers(); gated on the state machine and on the live
+ * snapshot / full-size liveshot flags so only the heaps actually
+ * allocated are released. The JPEG heap is always released. */
+status_t QCameraStream_Snapshot::
+deinitSnapshotBuffers(void)
+{
+ int ret = NO_ERROR;
+
+ LOGD("%s: E", __func__);
+
+ /* Deinit only if we have already initialized*/
+ if (getSnapshotState() >= SNAPSHOT_STATE_BUF_INITIALIZED ){
+
+ if(!isLiveSnapshot()) {
+ LOGD("%s: Unpreparing Snapshot Buffer", __func__);
+ ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+ if(ret != NO_ERROR) {
+ LOGE("%s:unreg snapshot buf err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ }
+
+ /* Clear main and thumbnail heap*/
+ if(!isLiveSnapshot()) {
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+ if (!isFullSizeLiveshot())
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mThumbnailMemory);
+ }
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+ }
+end:
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Full buffer teardown for the snapshot stream: releases raw or
+ * JPEG-path buffers depending on mSnapshotFormat, unmaps any buffers
+ * shared with the backend frameproc (HDR / wavelet denoise), clears the
+ * stream-buffer bookkeeping and queues, and resets the state machine to
+ * SNAPSHOT_STATE_UNINIT. Idempotent: returns immediately if already
+ * uninitialized. */
+void QCameraStream_Snapshot::deInitBuffer(void)
+{
+ mm_camera_channel_type_t ch_type;
+
+ LOGI("%s: E", __func__);
+
+ if( getSnapshotState() == SNAPSHOT_STATE_UNINIT) {
+ LOGD("%s: Already deinit'd!", __func__);
+ return;
+ }
+
+ if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ /* deinit buffer */
+ deinitRawSnapshotBuffers();
+ }
+ else
+ {
+ if (!isZSLMode() &&
+ ((mHalCamCtrl->getHDRMode() == HDR_MODE) || (mHalCamCtrl->isWDenoiseEnabled()))) {
+ /*register main and thumbnail buffers at back-end for frameproc*/
+ /* (here: send UNmapping messages to undo the mapping done at init) */
+ for (int i = 0; i < mHalCamCtrl->mSnapshotMemory.buffer_count; i++) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, i, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ LOGE("%s: sending unmapping data Msg Failed", __func__);
+ }
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, i, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ LOGE("%s: sending unmapping data Msg Failed", __func__);
+ }
+ }
+ }
+
+ deinitSnapshotBuffers();
+ }
+
+
+ /* deinit jpeg buffer if allocated */
+ if(mJpegHeap != NULL) mJpegHeap.clear();
+ mJpegHeap = NULL;
+
+ /* memset some global structure */
+ memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+ memset(&mPostviewStreamBuf, 0, sizeof(mPostviewStreamBuf));
+ mSnapshotQueue.flush();
+ mWDNQueue.flush();
+
+ setSnapshotState(SNAPSHOT_STATE_UNINIT);
+
+ LOGD("%s: X", __func__);
+}
+
+/*Temp: to be removed once event handling is enabled in mm-camera.
+ We need an event - one event for
+ stream-off to disable OPS_SNAPSHOT*/
+/* Thread body for raw captures only: blocks on the ad-hoc completion
+ * signal (mm_app_snapshot_wait) and then stops the stream from this
+ * thread, because stopping from the mm-camera callback would deadlock. */
+void QCameraStream_Snapshot::runSnapshotThread(void *data)
+{
+ LOGD("%s: E", __func__);
+
+ if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ /* TBD: Temp: Needs to be removed once event handling is enabled.
+ We cannot call mm-camera interface to stop snapshot from callback
+ function as it causes deadlock. Hence handling it here temporarily
+ in this thread. Later mm-camera intf will give us event in separate
+ thread context */
+ mm_app_snapshot_wait();
+ /* Send command to stop snapshot polling thread*/
+ stop();
+ }
+ LOGD("%s: X", __func__);
+}
+
+/*Temp: to be removed once event handling is enabled in mm-camera*/
+/* pthread entry point: trampolines to runSnapshotThread on the stream
+ * object passed as the thread argument. */
+static void *snapshot_thread(void *obj)
+{
+ QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)obj;
+ LOGD("%s: E", __func__);
+ if (pme != 0) {
+ pme->runSnapshotThread(obj);
+ }
+ else LOGW("not starting snapshot thread: the object went away!");
+ LOGD("%s: X", __func__);
+ return NULL;
+}
+
+/*Temp: to be removed later*/
+/* Handle for the raw-capture helper thread above. */
+static pthread_t mSnapshotThread;
+
+/* Initialize a (non-raw) JPEG capture: reads the current dimension
+ * config, switches the camera to MM_CAMERA_OP_MODE_CAPTURE, reconciles
+ * dimensions, sets the channel format, and allocates/registers snapshot
+ * buffers. For HDR/wavelet-denoise (non-ZSL) the buffers are also mapped
+ * to the backend frameproc. Full-size liveshot skips all of this.
+ * handleError() runs on any failure. */
+status_t QCameraStream_Snapshot::initJPEGSnapshot(int num_of_snapshots)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dim;
+ mm_camera_op_mode_type_t op_mode;
+
+ LOGV("%s: E", __func__);
+
+ if (isFullSizeLiveshot())
+ goto end;
+
+ LOGD("%s: Get current dimension", __func__);
+ /* Query mm_camera to get current dimension */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_DIMENSION, &dim);
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't get preview dimension!", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* Set camera op mode to MM_CAMERA_OP_MODE_CAPTURE */
+ LOGD("Setting OP_MODE_CAPTURE");
+ op_mode = MM_CAMERA_OP_MODE_CAPTURE;
+ if( NO_ERROR != cam_config_set_parm(mCameraId,
+ MM_CAMERA_PARM_OP_MODE, &op_mode)) {
+ LOGE("%s: MM_CAMERA_OP_MODE_CAPTURE failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* config the parmeters and see if we need to re-init the stream*/
+ LOGI("%s: Configure Snapshot Dimension", __func__);
+ ret = configSnapshotDimension(&dim);
+ if (ret != NO_ERROR) {
+ LOGE("%s: Setting snapshot dimension failed", __func__);
+ goto end;
+ }
+
+ /* Initialize stream - set format, acquire channel */
+ ret = initSnapshotFormat(&dim);
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't init nonZSL stream!", __func__);
+ goto end;
+ }
+
+ ret = initSnapshotBuffers(&dim, num_of_snapshots);
+ if ( NO_ERROR != ret ){
+ LOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+ goto end;
+ }
+
+ if (!isZSLMode() &&
+ ((mHalCamCtrl->getHDRMode() == HDR_MODE) || (mHalCamCtrl->isWDenoiseEnabled()))) {
+ /*register main and thumbnail buffers at back-end for frameproc*/
+ for (int i = 0; i < num_of_snapshots; i++) {
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, i,
+ mSnapshotStreamBuf.frame[i].fd, mHalCamCtrl->mSnapshotMemory.size, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ LOGE("%s: sending mapping data Msg Failed", __func__);
+ }
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, i,
+ mPostviewStreamBuf.frame[i].fd, mHalCamCtrl->mThumbnailMemory.size, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ LOGE("%s: sending mapping data Msg Failed", __func__);
+ }
+ }
+ }
+
+end:
+ /* Based on what state we are in, we'll need to handle error -
+ like deallocating memory if we have already allocated */
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ LOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Initialize a RAW (bayer) capture: switches to capture op mode, queries
+ * the sensor-dependent raw dimensions from the driver, configures the
+ * RAW channel, and allocates/registers the raw buffers. On success the
+ * state machine advances to SNAPSHOT_STATE_INITIALIZED; handleError()
+ * runs on any failure. */
+status_t QCameraStream_Snapshot::initRawSnapshot(int num_of_snapshots)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dim;
+ bool initSnapshot = false;
+ mm_camera_op_mode_type_t op_mode;
+
+ LOGV("%s: E", __func__);
+
+ /* Set camera op mode to MM_CAMERA_OP_MODE_CAPTURE */
+ LOGD("%s: Setting OP_MODE_CAPTURE", __func__);
+ op_mode = MM_CAMERA_OP_MODE_CAPTURE;
+ if( NO_ERROR != cam_config_set_parm(mCameraId,
+ MM_CAMERA_PARM_OP_MODE, &op_mode)) {
+ LOGE("%s: MM_CAMERA_OP_MODE_CAPTURE failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* For raw snapshot, we do not know the dimension as it
+ depends on sensor to sensor. We call getDimension which will
+ give us raw width and height */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ LOGE("%s: error - can't get dimension!", __func__);
+ LOGE("%s: X", __func__);
+ goto end;
+ }
+ LOGD("%s: Raw Snapshot dimension: %dx%d", __func__,
+ dim.raw_picture_width,
+ dim.raw_picture_height);
+
+
+ ret = initRawSnapshotChannel(&dim, num_of_snapshots);
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't init nonZSL stream!", __func__);
+ goto end;
+ }
+
+ ret = initRawSnapshotBuffers(&dim, num_of_snapshots);
+ if ( NO_ERROR != ret ){
+ LOGE("%s: Failure allocating memory for Raw Snapshot buffers",
+ __func__);
+ goto end;
+ }
+ setSnapshotState(SNAPSHOT_STATE_INITIALIZED);
+
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ LOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Configure a full-size liveshot (snapshot taken during video record
+ at the full sensor picture size): reconcile the current dimension
+ with the app-requested picture size, derive thumbnail/postview
+ sizes, then init the snapshot format and a single snapshot buffer. */
+status_t QCameraStream_Snapshot::initFullLiveshot(void)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dim;
+ bool matching = true;
+
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ LOGE("%s: error - can't get dimension!", __func__);
+ return ret;
+ }
+#if 1
+ /* First check if the picture resolution is the same, if not, change it*/
+ mHalCamCtrl->getPictureSize(&mPictureWidth, &mPictureHeight);
+ LOGD("%s: Picture size received: %d x %d", __func__,
+ mPictureWidth, mPictureHeight);
+
+ //Use main image as input to encoder to generate thumbnail
+ mThumbnailWidth = dim.picture_width;
+ mThumbnailHeight = dim.picture_height;
+ matching = (mPictureWidth == dim.picture_width) &&
+ (mPictureHeight == dim.picture_height);
+
+ //Actual thumbnail size requested
+ mPostviewWidth = mHalCamCtrl->mParameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ mPostviewHeight = mHalCamCtrl->mParameters.getInt(CameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+ /* A 0x0 requested thumbnail means the app wants none: encode with a
+ default-sized input but drop the thumbnail from the final JPEG. */
+ mDropThumbnail = false;
+ if (mPostviewWidth == 0 && mPostviewHeight == 0) {
+ mPostviewWidth = THUMBNAIL_DEFAULT_WIDTH;
+ mPostviewHeight = THUMBNAIL_DEFAULT_HEIGHT;
+ mDropThumbnail = true;
+ }
+
+ if (!matching) {
+ dim.picture_width = mPictureWidth;
+ dim.picture_height = mPictureHeight;
+ dim.ui_thumbnail_height = mThumbnailHeight;
+ dim.ui_thumbnail_width = mThumbnailWidth;
+ }
+ LOGD("%s: Picture size to set: %d x %d", __func__,
+ dim.picture_width, dim.picture_height);
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+ /* Bug fix: the set_parm result was previously discarded (overwritten
+ on the next call); bail out instead of initializing the stream
+ against a dimension the backend rejected. */
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't set dimension!", __func__);
+ return ret;
+ }
+#endif
+ /* Initialize stream - set format, acquire channel */
+ ret = initSnapshotFormat(&dim);
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't init nonZSL stream!", __func__);
+ return ret;
+ }
+ ret = initSnapshotBuffers(&dim, 1);
+ if ( NO_ERROR != ret ){
+ LOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+ return ret;
+ }
+
+ return ret;
+}
+
+/* Prepare the snapshot stream for ZSL (zero-shutter-lag) capture:
+ query the current dimension, reconcile it with the app-requested
+ snapshot size, init the stream format, and allocate enough buffers
+ for the ZSL backlog queue plus ~3 frames in flight in the lower
+ layer. Unlike the non-ZSL init paths, no op-mode switch is needed
+ here (the removed op_mode local was never used). */
+status_t QCameraStream_Snapshot::initZSLSnapshot(void)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dim;
+
+ LOGV("%s: E", __func__);
+
+ LOGD("%s: Get current dimension", __func__);
+ /* Query mm_camera to get current dimension */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_DIMENSION, &dim);
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't get preview dimension!", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* config the parmeters and see if we need to re-init the stream*/
+ LOGD("%s: Configure Snapshot Dimension", __func__);
+ ret = configSnapshotDimension(&dim);
+ if (ret != NO_ERROR) {
+ LOGE("%s: Setting snapshot dimension failed", __func__);
+ goto end;
+ }
+
+ /* Initialize stream - set format, acquire channel */
+ ret = initSnapshotFormat(&dim);
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't init nonZSL stream!", __func__);
+ goto end;
+ }
+
+ /* For ZSL we'll have to allocate buffers for internal queue
+ maintained by mm-camera lib plus around 3 buffers used for
+ data handling by lower layer.*/
+
+ ret = initSnapshotBuffers(&dim, mHalCamCtrl->getZSLQueueDepth() + 3);
+ if ( NO_ERROR != ret ){
+ LOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+ goto end;
+ }
+
+end:
+ /* Based on what state we are in, we'll need to handle error -
+ like deallocating memory if we have already allocated */
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ LOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Kick off a JPEG snapshot: issue MM_CAMERA_OPS_SNAPSHOT to the
+ backend and spawn a detached thread that waits for the frame
+ (temporary until mm-camera exposes an event callback). */
+status_t QCameraStream_Snapshot::
+takePictureJPEG(void)
+{
+ status_t ret = NO_ERROR;
+
+ LOGD("%s: E", __func__);
+
+ /* Take snapshot */
+ LOGD("%s: Call MM_CAMERA_OPS_SNAPSHOT", __func__);
+ if (cam_ops_action(mCameraId, TRUE,
+ MM_CAMERA_OPS_SNAPSHOT, this) != NO_ERROR) {
+ LOGE("%s: Failure taking snapshot", __func__);
+ ret = FAILED_TRANSACTION;
+ handleError();
+ } else {
+ /* TBD: Temp: to be removed once event callback
+ is implemented in mm-camera lib */
+ pthread_attr_t thread_attr;
+ pthread_attr_init(&thread_attr);
+ pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_DETACHED);
+ pthread_create(&mSnapshotThread, &thread_attr,
+ snapshot_thread, (void *)this);
+ }
+
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Kick off a RAW capture: issue MM_CAMERA_OPS_RAW to the backend and
+ spawn a detached thread that waits for the frame callback
+ (temporary until mm-camera exposes an event callback). */
+status_t QCameraStream_Snapshot::
+takePictureRaw(void)
+{
+ status_t ret = NO_ERROR;
+
+ LOGD("%s: E", __func__);
+
+ /* Take snapshot */
+ LOGD("%s: Call MM_CAMERA_OPS_SNAPSHOT", __func__);
+ if (cam_ops_action(mCameraId, TRUE,
+ MM_CAMERA_OPS_RAW, this) != NO_ERROR) {
+ LOGE("%s: Failure taking snapshot", __func__);
+ ret = FAILED_TRANSACTION;
+ handleError();
+ } else {
+ /* TBD: Temp: to be removed once event callback
+ is implemented in mm-camera lib */
+ /* Wait for snapshot frame callback to return*/
+ pthread_attr_t thread_attr;
+ pthread_attr_init(&thread_attr);
+ pthread_attr_setdetachstate(&thread_attr, PTHREAD_CREATE_DETACHED);
+ pthread_create(&mSnapshotThread, &thread_attr,
+ snapshot_thread, (void *)this);
+ }
+
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* This is called from the video stream object */
+/* Encode one video frame as a (low-power) live snapshot JPEG: play
+ the shutter sound, issue the app raw-image callbacks, and hand the
+ frame described by recvd_frame/dim/frame_len to the JPEG encoder. */
+status_t QCameraStream_Snapshot::
+takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame,
+ cam_ctrl_dimension_t *dim,
+ int frame_len)
+{
+ status_t ret = NO_ERROR;
+ common_crop_t crop_info;
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb;
+
+ LOGI("%s: E", __func__);
+
+ /* Bug fix: crop_info used to be passed to notifyShutter() before the
+ memset that initialized it; zero it once, before first use. */
+ memset(&crop_info, 0, sizeof(common_crop_t));
+
+ /* set flag to indicate we are doing livesnapshot */
+ resetSnapshotCounters( );
+ setModeLiveSnapshot(true);
+
+ if(!mHalCamCtrl->mShutterSoundPlayed) {
+ notifyShutter(&crop_info, TRUE);
+ }
+ notifyShutter(&crop_info, FALSE);
+ mHalCamCtrl->mShutterSoundPlayed = FALSE;
+
+ // send upperlayer callback for raw image (data or notify, not both)
+ if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+ notifyCb = mHalCamCtrl->mNotifyCb;
+ } else {
+ notifyCb = NULL;
+ }
+
+ LOGI("%s:Passed picture size: %d X %d", __func__,
+ dim->picture_width, dim->picture_height);
+ LOGI("%s:Passed thumbnail size: %d X %d", __func__,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ /* Cache the geometry/format of the frame we are about to encode. */
+ mPictureWidth = dim->picture_width;
+ mPictureHeight = dim->picture_height;
+ mThumbnailWidth = dim->ui_thumbnail_width;
+ mThumbnailHeight = dim->ui_thumbnail_height;
+ mPictureFormat = dim->main_img_format;
+ mThumbnailFormat = dim->thumb_format;
+
+ crop_info.in1_w = mPictureWidth;
+ crop_info.in1_h = mPictureHeight;
+ /* For low power live snapshot the thumbnail output size is set to default size.
+ In case of live snapshot video buffer = thumbnail buffer. For higher resolutions
+ the thumnail will be dropped if its more than 64KB. To avoid thumbnail drop
+ set thumbnail as configured by application. This will be a size lower than video size*/
+ mDropThumbnail = false;
+ if(mHalCamCtrl->thumbnailWidth == 0 && mHalCamCtrl->thumbnailHeight == 0) {
+ LOGE("Live Snapshot thumbnail will be dropped as indicated by application");
+ mDropThumbnail = true;
+ }
+ crop_info.out1_w = mHalCamCtrl->thumbnailWidth;
+ crop_info.out1_h = mHalCamCtrl->thumbnailHeight;
+ ret = encodeData(recvd_frame, &crop_info, frame_len, 0);
+ if (ret != NO_ERROR) {
+ LOGE("%s: Failure configuring JPEG encoder", __func__);
+
+ /* Failure encoding this frame. Just notify upper layer
+ about it.*/
+ if(mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & MEDIA_RECORDER_MSG_COMPRESSED_IMAGE)) {
+ /* get picture failed. Give jpeg callback with NULL data
+ * to the application to restore to preview mode
+ * NOTE(review): this branch is currently a no-op - the NULL-data
+ * callback described above is never actually issued. */
+ }
+ setModeLiveSnapshot(false);
+ goto end;
+ }
+
+ if (dataCb) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+
+end:
+ LOGI("%s: X", __func__);
+ return ret;
+}
+
+/* ZSL capture: ask mm-camera to deliver mNumOfSnapshot already-
+ buffered frame(s) from the snapshot channel queue instead of
+ triggering a new sensor exposure. */
+status_t QCameraStream_Snapshot::
+takePictureZSL(void)
+{
+ status_t ret = NO_ERROR;
+ mm_camera_ops_parm_get_buffered_frame_t param;
+
+ LOGE("%s: E", __func__);
+
+ /* Bug fix: "&param" had been mangled into the literal text "¶m"
+ (HTML-entity corruption of "&para;m") in both uses below; the
+ address-of expressions are restored so this compiles. */
+ memset(&param, 0, sizeof(param));
+ param.ch_type = MM_CAMERA_CH_SNAPSHOT;
+
+ /* Take snapshot */
+ LOGE("%s: Call MM_CAMERA_OPS_GET_BUFFERED_FRAME", __func__);
+
+ mNumOfSnapshot = mHalCamCtrl->getNumOfSnapshots();
+ if (NO_ERROR != cam_ops_action(mCameraId,
+ TRUE,
+ MM_CAMERA_OPS_GET_BUFFERED_FRAME,
+ &param)) {
+ LOGE("%s: Failure getting zsl frame(s)", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* TBD: Temp: to be removed once event callback
+ is implemented in mm-camera lib */
+/* pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+ pthread_create(&mSnapshotThread,&attr,
+ snapshot_thread, (void *)this);
+*/
+end:
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Start streaming in ZSL mode: the backend begins queuing full-size
+ frames that a later takePictureZSL() can pull from. */
+status_t QCameraStream_Snapshot::
+startStreamZSL(void)
+{
+ status_t ret = NO_ERROR;
+
+ LOGD("%s: E", __func__);
+
+ /* Start ZSL - it'll start queuing the frames */
+ LOGD("%s: Call MM_CAMERA_OPS_ZSL", __func__);
+ if (cam_ops_action(mCameraId, TRUE,
+ MM_CAMERA_OPS_ZSL, this) != NO_ERROR) {
+ LOGE("%s: Failure starting ZSL stream", __func__);
+ ret = FAILED_TRANSACTION;
+ }
+
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Feed one captured frame into the JPEG encoder, or queue it when the
+ encoder is busy. Three paths:
+ 1) encoder busy (or queue non-empty and this is a fresh frame):
+ park the frame in mSnapshotQueue for later;
+ 2) continuation of a burst (encoder idle, not the first frame):
+ reuse the existing encoder session via omxJpegEncodeNext();
+ 3) first frame: full encoder setup (dimensions, quality, crop,
+ exif, callbacks) then omxJpegEncode().
+ NOTE(review): the crop_info and frame_len parameters are never read
+ in this body - crop data is taken from mCrop instead; confirm
+ before relying on them at call sites. */
+status_t QCameraStream_Snapshot::
+encodeData(mm_camera_ch_data_buf_t* recvd_frame,
+ common_crop_t *crop_info,
+ int frame_len,
+ bool enqueued)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dimension;
+ struct msm_frame *postviewframe;
+ struct msm_frame *mainframe;
+ common_crop_t crop;
+ cam_point_t main_crop_offset;
+ cam_point_t thumb_crop_offset;
+ int width, height;
+ uint8_t *thumbnail_buf; /* NOTE(review): unused in this body */
+ uint32_t thumbnail_fd; /* NOTE(review): unused in this body */
+
+ omx_jpeg_encode_params encode_params;
+
+ /* If it's the only frame, we directly pass to encoder.
+ If not, we'll queue it and check during next jpeg .
+ Also, if the queue isn't empty then we need to queue this
+ one too till its turn comes (only if it's not already
+ queued up there)*/
+ LOGD("%s: getSnapshotState()=%d, enqueued =%d, Q empty=%d", __func__, getSnapshotState(), enqueued, mSnapshotQueue.isEmpty());
+ LOGD("%s: mNumOfRecievedJPEG=%d, mNumOfSnapshot =%d", __func__, mNumOfRecievedJPEG, mNumOfSnapshot);
+ if((getSnapshotState() == SNAPSHOT_STATE_JPEG_ENCODING) ||
+ (!mSnapshotQueue.isEmpty() && !enqueued)){ /*busy and new buffer*/
+ /* encoding is going on. Just queue the frame for now.*/
+ LOGD("%s: JPEG encoding in progress."
+ "Enqueuing frame id(%d) for later processing.", __func__,
+ recvd_frame->snapshot.main.idx);
+ mSnapshotQueue.enqueue((void *)recvd_frame);
+ } else if (enqueued ||
+ (mNumOfRecievedJPEG != mNumOfSnapshot && mNumOfRecievedJPEG != 0)) { /*not busy, not first*/
+ LOGD("%s: JPG not busy, not first frame.", __func__);
+
+ // For full-size live shot, use mainimage to generate thumbnail
+ if (isFullSizeLiveshot()) {
+ postviewframe = recvd_frame->snapshot.main.frame;
+ } else {
+ postviewframe = recvd_frame->snapshot.thumbnail.frame;
+ }
+ mainframe = recvd_frame->snapshot.main.frame;
+ cam_config_get_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, &dimension);
+ LOGD("%s: main_fmt =%d, tb_fmt =%d", __func__, dimension.main_img_format, dimension.thumb_format);
+ /*since this is the continue job, we only care about the input buffer*/
+ encode_params.thumbnail_buf = (uint8_t *)postviewframe->buffer;
+ encode_params.thumbnail_fd = postviewframe->fd;
+ encode_params.snapshot_buf = (uint8_t *)mainframe->buffer;
+ encode_params.snapshot_fd = mainframe->fd;
+ encode_params.dimension = &dimension;
+ /*update exif parameters in HAL*/
+ mHalCamCtrl->setExifTags();
+
+ encode_params.exif_data = mHalCamCtrl->getExifData();
+ encode_params.exif_numEntries = mHalCamCtrl->getExifTableNumEntries();
+ if (!omxJpegEncodeNext(&encode_params)){
+ LOGE("%s: Failure! JPEG encoder returned error.", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ /* Save the pointer to the frame sent for encoding. we'll need it to
+ tell kernel that we are done with the frame.*/
+ mCurrentFrameEncoded = recvd_frame;
+ setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODING);
+ } else { /*not busy and new buffer (first job)*/
+
+ LOGD("%s: JPG Idle and first frame.", __func__);
+
+ // For full-size live shot, use mainimage to generate thumbnail
+ if (isFullSizeLiveshot()){
+ postviewframe = recvd_frame->snapshot.main.frame;
+ } else {
+ postviewframe = recvd_frame->snapshot.thumbnail.frame;
+ }
+ mainframe = recvd_frame->snapshot.main.frame;
+ cam_config_get_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, &dimension);
+ LOGD("%s: main_fmt =%d, tb_fmt =%d", __func__, dimension.main_img_format, dimension.thumb_format);
+
+ dimension.orig_picture_dx = mPictureWidth;
+ dimension.orig_picture_dy = mPictureHeight;
+
+ /* Thumbnail input size: preview size in ZSL, otherwise the
+ configured thumbnail size; zero when the thumbnail is dropped. */
+ if(!mDropThumbnail) {
+ if(isZSLMode()) {
+ LOGI("Setting input thumbnail size to previewWidth= %d previewheight= %d in ZSL mode",
+ mHalCamCtrl->mPreviewWidth, mHalCamCtrl->mPreviewHeight);
+ dimension.thumbnail_width = width = mHalCamCtrl->mPreviewWidth;
+ dimension.thumbnail_height = height = mHalCamCtrl->mPreviewHeight;
+ } else {
+ dimension.thumbnail_width = width = mThumbnailWidth;
+ dimension.thumbnail_height = height = mThumbnailHeight;
+ }
+ } else {
+ dimension.thumbnail_width = width = 0;
+ dimension.thumbnail_height = height = 0;
+ }
+ dimension.main_img_format = mPictureFormat;
+ dimension.thumb_format = mThumbnailFormat;
+
+ /*TBD: Move JPEG handling to the mm-camera library */
+ LOGD("Setting callbacks, initializing encoder and start encoding.");
+ LOGD(" Passing my obj: %x", (unsigned int) this);
+ set_callbacks(snapshot_jpeg_fragment_cb, snapshot_jpeg_cb, this,
+ mHalCamCtrl->mJpegMemory.camera_memory[0]->data, &mJpegOffset);
+ omxJpegStart();
+ /* Fall back to quality 85 when the app did not set one. */
+ if (mHalCamCtrl->getJpegQuality())
+ mm_jpeg_encoder_setMainImageQuality(mHalCamCtrl->getJpegQuality());
+ else
+ mm_jpeg_encoder_setMainImageQuality(85);
+
+ LOGE("%s: Dimension to encode: main: %dx%d thumbnail: %dx%d", __func__,
+ dimension.orig_picture_dx, dimension.orig_picture_dy,
+ dimension.thumbnail_width, dimension.thumbnail_height);
+
+ /*TBD: Pass 0 as cropinfo for now as v4l2 doesn't provide
+ cropinfo. It'll be changed later.*/
+ memset(&crop,0,sizeof(common_crop_t));
+ memset(&main_crop_offset,0,sizeof(cam_point_t));
+ memset(&thumb_crop_offset,0,sizeof(cam_point_t));
+
+ /* Setting crop info */
+
+ /*Main image*/
+ crop.in2_w=mCrop.snapshot.main_crop.width;// dimension.picture_width
+ crop.in2_h=mCrop.snapshot.main_crop.height;// dimension.picture_height;
+ if (!mJpegDownscaling) {
+ crop.out2_w = mPictureWidth;
+ crop.out2_h = mPictureHeight;
+ } else {
+ /* Encoder downscales to the actual requested picture size; when no
+ crop was reported, crop from the full captured frame. */
+ crop.out2_w = mActualPictureWidth;
+ crop.out2_h = mActualPictureHeight;
+ if (!crop.in2_w || !crop.in2_h) {
+ crop.in2_w = mPictureWidth;
+ crop.in2_h = mPictureHeight;
+ }
+ }
+ main_crop_offset.x=mCrop.snapshot.main_crop.left;
+ main_crop_offset.y=mCrop.snapshot.main_crop.top;
+ /*Thumbnail image*/
+ crop.in1_w=mCrop.snapshot.thumbnail_crop.width; //dimension.thumbnail_width;
+ crop.in1_h=mCrop.snapshot.thumbnail_crop.height; // dimension.thumbnail_height;
+ if(isLiveSnapshot() || isFullSizeLiveshot()) {
+ crop.out1_w= mHalCamCtrl->thumbnailWidth;
+ crop.out1_h= mHalCamCtrl->thumbnailHeight;
+ LOGD("Thumbnail width= %d height= %d for livesnapshot", crop.out1_w, crop.out1_h);
+ } else {
+ crop.out1_w = width;
+ crop.out1_h = height;
+ }
+ thumb_crop_offset.x=mCrop.snapshot.thumbnail_crop.left;
+ thumb_crop_offset.y=mCrop.snapshot.thumbnail_crop.top;
+
+ //update exif parameters in HAL
+ mHalCamCtrl->initExifData();
+
+ /*Fill in the encode parameters*/
+ encode_params.dimension = (const cam_ctrl_dimension_t *)&dimension;
+ //if (!isFullSizeLiveshot()) {
+ encode_params.thumbnail_buf = (uint8_t *)postviewframe->buffer;
+ encode_params.thumbnail_fd = postviewframe->fd;
+ encode_params.thumbnail_offset = postviewframe->phy_offset;
+ encode_params.thumb_crop_offset = &thumb_crop_offset;
+ //}
+ encode_params.snapshot_buf = (uint8_t *)mainframe->buffer;
+ encode_params.snapshot_fd = mainframe->fd;
+ encode_params.snapshot_offset = mainframe->phy_offset;
+ encode_params.scaling_params = &crop;
+ encode_params.exif_data = mHalCamCtrl->getExifData();
+ encode_params.exif_numEntries = mHalCamCtrl->getExifTableNumEntries();
+
+ if (isLiveSnapshot() && !isFullSizeLiveshot())
+ encode_params.a_cbcroffset = mainframe->cbcr_off;
+ else
+ encode_params.a_cbcroffset = -1;
+ encode_params.main_crop_offset = &main_crop_offset;
+
+ if (mDropThumbnail)
+ encode_params.hasThumbnail = 0;
+ else
+ encode_params.hasThumbnail = 1;
+ encode_params.thumb_crop_offset = &thumb_crop_offset;
+ encode_params.main_format = dimension.main_img_format;
+ encode_params.thumbnail_format = dimension.thumb_format;
+
+ if (!omxJpegEncode(&encode_params)){
+ LOGE("%s: Failure! JPEG encoder returned error.", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* Save the pointer to the frame sent for encoding. we'll need it to
+ tell kernel that we are done with the frame.*/
+ mCurrentFrameEncoded = recvd_frame;
+ setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODING);
+ }
+
+end:
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Called twice - 1st to play shutter sound and 2nd to configure
+ overlay/surfaceflinger for postview */
+void QCameraStream_Snapshot::notifyShutter(common_crop_t *crop,
+ bool mPlayShutterSoundOnly)
+{
+ LOGD("%s: E", __func__);
+ /* Skip the callback once the stream is stopped, except in
+ live-snapshot mode which runs without mActive being set.
+ NOTE: the crop argument is currently unused here. */
+ if (!(mActive || isLiveSnapshot())) {
+ LOGE("__debbug: Snapshot thread stopped \n");
+ return;
+ }
+ if (mHalCamCtrl->mNotifyCb) {
+ mHalCamCtrl->mNotifyCb(CAMERA_MSG_SHUTTER, 0, mPlayShutterSoundOnly,
+ mHalCamCtrl->mCallbackCookie);
+ }
+ LOGD("%s: X", __func__);
+}
+
+/* Flush the main (and, when present, thumbnail) frame out of the CPU
+ cache so the JPEG encoder sees coherent data, then forward the
+ frame to encodeData(). 'enqueued' tells encodeData() the frame came
+ from the pending queue. Postview display is still TBD - only the
+ burst flag is logged. */
+status_t QCameraStream_Snapshot::
+encodeDisplayAndSave(mm_camera_ch_data_buf_t* recvd_frame,
+ bool enqueued)
+{
+ status_t ret = NO_ERROR;
+ struct msm_frame *postview_frame;
+ struct ion_flush_data cache_inv_data;
+ int ion_fd;
+ int buf_index = 0;
+ ssize_t offset_addr = 0;
+ common_crop_t dummy_crop;
+ /* send frame for encoding */
+ LOGE("%s: Send frame for encoding", __func__);
+ /*TBD: Pass 0 as cropinfo for now as v4l2 doesn't provide
+ cropinfo. It'll be changed later.*/
+ if(!mActive) {
+ LOGE("Cancel Picture.. Stop is called");
+ return NO_ERROR;
+ }
+ if(isZSLMode()){
+ LOGE("%s: set JPEG rotation in ZSL mode", __func__);
+ mHalCamCtrl->setJpegRotation(isZSLMode());
+ }
+#ifdef USE_ION
+ /*Clean out(Write-back) cache before sending for JPEG*/
+ memset(&cache_inv_data, 0, sizeof(struct ion_flush_data));
+
+ cache_inv_data.vaddr = (void*)recvd_frame->snapshot.main.frame->buffer;
+ cache_inv_data.fd = recvd_frame->snapshot.main.frame->fd;
+ cache_inv_data.handle = recvd_frame->snapshot.main.frame->fd_data.handle;
+ cache_inv_data.length = recvd_frame->snapshot.main.frame->ion_alloc.len;
+ ion_fd = recvd_frame->snapshot.main.frame->ion_dev_fd;
+ if(ion_fd > 0) {
+ if(ioctl(ion_fd, ION_IOC_CLEAN_INV_CACHES, &cache_inv_data) < 0)
+ LOGE("%s: Cache Invalidate failed\n", __func__);
+ else {
+ LOGD("%s: Successful cache invalidate\n", __func__);
+ /* A separate thumbnail frame exists only when this is not a
+ full-size liveshot; flush it too. */
+ if(!isFullSizeLiveshot()) {
+ ion_fd = recvd_frame->snapshot.thumbnail.frame->ion_dev_fd;
+ cache_inv_data.vaddr = (void*)recvd_frame->snapshot.thumbnail.frame->buffer;
+ cache_inv_data.fd = recvd_frame->snapshot.thumbnail.frame->fd;
+ cache_inv_data.handle = recvd_frame->snapshot.thumbnail.frame->fd_data.handle;
+ cache_inv_data.length = recvd_frame->snapshot.thumbnail.frame->ion_alloc.len;
+ if(ioctl(ion_fd, ION_IOC_CLEAN_INV_CACHES, &cache_inv_data) < 0)
+ LOGE("%s: Cache Invalidate failed\n", __func__);
+ else
+ LOGD("%s: Successful cache invalidate\n", __func__);
+ }
+ }
+ }
+#endif
+ memset(&dummy_crop,0,sizeof(common_crop_t));
+ ret = encodeData(recvd_frame, &dummy_crop, mSnapshotStreamBuf.frame_len,
+ enqueued);
+ if (ret != NO_ERROR) {
+ LOGE("%s: Failure configuring JPEG encoder", __func__);
+
+ goto end;
+ }
+
+ /* Display postview image*/
+ /* If it's burst mode, we won't be displaying postview of all the captured
+ images - only the first one */
+ LOGD("%s: Burst mode flag %d", __func__, mBurstModeFlag);
+
+end:
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+/* Entry point for a captured frame arriving from the lower layer.
+ RAW path: play shutter, hand the raw buffer straight to the app's
+ COMPRESSED_IMAGE callback, and signal completion.
+ JPEG path: deep-copy the (stack-local) buffer descriptor, route it
+ either through wavelet denoise (ZSL + WDN enabled) or directly to
+ encodeDisplayAndSave(), then issue the raw-image callbacks.
+ mStopCallbackLock is dropped around every app callback to avoid
+ deadlocking against stop(). */
+status_t QCameraStream_Snapshot::receiveRawPicture(mm_camera_ch_data_buf_t* recvd_frame)
+{
+ int buf_index = 0;
+ common_crop_t crop;
+ int rc = NO_ERROR;
+
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb, jpgDataCb;
+
+ LOGD("%s: E ", __func__);
+ mStopCallbackLock.lock( );
+ if(!mActive) {
+ mStopCallbackLock.unlock();
+ LOGD("%s: Stop receiving raw pic ", __func__);
+ return NO_ERROR;
+ }
+
+ mHalCamCtrl->dumpFrameToFile(recvd_frame->snapshot.main.frame, HAL_DUMP_FRM_MAIN);
+ if (!isFullSizeLiveshot())
+ mHalCamCtrl->dumpFrameToFile(recvd_frame->snapshot.thumbnail.frame,
+ HAL_DUMP_FRM_THUMBNAIL);
+
+ /* If it's raw snapshot, we just want to tell upperlayer to save the image*/
+ if(mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ LOGD("%s: Call notifyShutter 2nd time in case of RAW", __func__);
+ /* NOTE(review): crop is passed to notifyShutter() uninitialized on
+ this path; notifyShutter() does not read it today - confirm
+ before it ever does. */
+ mStopCallbackLock.unlock();
+ if(!mHalCamCtrl->mShutterSoundPlayed) {
+ notifyShutter(&crop, TRUE);
+ }
+ notifyShutter(&crop, FALSE);
+ mHalCamCtrl->mShutterSoundPlayed = FALSE;
+
+ mStopCallbackLock.lock( );
+ LOGD("%s: Sending Raw Snapshot Callback to Upperlayer", __func__);
+ buf_index = recvd_frame->def.idx;
+
+ if (mHalCamCtrl->mDataCb && mActive &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ mStopCallbackLock.unlock();
+
+ if(dataCb) {
+ dataCb(
+ CAMERA_MSG_COMPRESSED_IMAGE,
+ mHalCamCtrl->mRawMemory.camera_memory[buf_index], 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+ /* TBD: Temp: To be removed once event handling is enabled */
+ mm_app_snapshot_done();
+ } else {
+ /*TBD: v4l2 doesn't have support to provide cropinfo along with
+ frame. We'll need to query.*/
+ memset(&crop, 0, sizeof(common_crop_t));
+
+ /*maftab*/
+ #if 0
+ crop.in1_w=mCrop.snapshot.thumbnail_crop.width;
+ crop.in1_h=mCrop.snapshot.thumbnail_crop.height;
+ crop.out1_w=mThumbnailWidth;
+ crop.out1_h=mThumbnailHeight;
+ #endif
+
+ LOGD("%s: Call notifyShutter 2nd time", __func__);
+ /* The recvd_frame structre we receive from lower library is a local
+ variable. So we'll need to save this structure so that we won't
+ be later pointing to garbage data when that variable goes out of
+ scope */
+ mm_camera_ch_data_buf_t* frame =
+ (mm_camera_ch_data_buf_t *)malloc(sizeof(mm_camera_ch_data_buf_t));
+ if (frame == NULL) {
+ LOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+ /* Return the buffer to the kernel since we cannot process it. */
+ cam_evt_buf_done(mCameraId, recvd_frame);
+ mStopCallbackLock.unlock();
+ return BAD_VALUE;
+ }
+ memcpy(frame, recvd_frame, sizeof(mm_camera_ch_data_buf_t));
+
+ //mStopCallbackLock.lock();
+
+ // only in ZSL mode and Wavelet Denoise is enabled, we will send frame to deamon to do WDN
+ if (isZSLMode() && mHalCamCtrl->isWDenoiseEnabled()) {
+ if(mIsDoingWDN){
+ /* A denoise job is in flight; park this frame until it is done. */
+ mWDNQueue.enqueue((void *)frame);
+ LOGD("%s: Wavelet denoise is going on, queue frame", __func__);
+ rc = NO_ERROR;
+ } else {
+ LOGD("%s: Start Wavelet denoise", __func__);
+ mIsDoingWDN = TRUE; // set the falg to TRUE because we are going to do WDN
+
+ // No WDN is going on so far, we will start it here
+ rc = doWaveletDenoise(frame);
+ if ( NO_ERROR != rc ) {
+ LOGE("%s: Error while doing wavelet denoise", __func__);
+ mIsDoingWDN = FALSE;
+ }
+ }
+ }
+ else {
+ LOGD("%s: encodeDisplayAndSave ", __func__);
+ rc = encodeDisplayAndSave(frame, 0);
+ }
+
+
+ // send upperlayer callback for raw image (data or notify, not both)
+ if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+ notifyCb = mHalCamCtrl->mNotifyCb;
+ } else {
+ notifyCb = NULL;
+ }
+
+ /* Drop the lock before running app callbacks (shutter + raw). */
+ mStopCallbackLock.unlock();
+ if(!mHalCamCtrl->mShutterSoundPlayed) {
+ notifyShutter(&crop, TRUE);
+ }
+ notifyShutter(&crop, FALSE);
+ mHalCamCtrl->mShutterSoundPlayed = FALSE;
+
+
+ if (rc != NO_ERROR)
+ {
+ LOGE("%s: Error while encoding/displaying/saving image", __func__);
+ cam_evt_buf_done(mCameraId, recvd_frame);
+
+ if(mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ /* get picture failed. Give jpeg callback with NULL data
+ * to the application to restore to preview mode
+ */
+ jpgDataCb = mHalCamCtrl->mDataCb;
+ } else {
+ jpgDataCb = NULL;
+ }
+ LOGE("%s: encode err so data cb", __func__);
+ //mStopCallbackLock.unlock();
+ if (dataCb) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+ if (jpgDataCb) {
+ jpgDataCb(CAMERA_MSG_COMPRESSED_IMAGE,
+ NULL, 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+
+ /* The saved copy is no longer needed on the failure path. */
+ if (frame != NULL) {
+ free(frame);
+ }
+ } else {
+
+ //mStopCallbackLock.unlock();
+ if (dataCb) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ }
+
+ LOGD("%s: X", __func__);
+ return NO_ERROR;
+}
+
+//-------------------------------------------------------------------
+// Helper Functions
+//-------------------------------------------------------------------
+/* Roll back partially-completed snapshot setup according to how far
+ the state machine progressed. The switch intentionally falls
+ through: each state also needs the cleanup of every later-listed
+ (earlier-in-setup) state below it. Always ends in
+ SNAPSHOT_STATE_ERROR. */
+void QCameraStream_Snapshot::handleError()
+{
+ mm_camera_channel_type_t ch_type; /* NOTE(review): unused */
+ LOGD("%s: E", __func__);
+
+ /* Depending upon the state we'll have to
+ handle error */
+ switch(getSnapshotState()) {
+ case SNAPSHOT_STATE_JPEG_ENCODING:
+ if(mJpegHeap != NULL) mJpegHeap.clear();
+ mJpegHeap = NULL;
+
+ /* fall through */
+ case SNAPSHOT_STATE_YUV_RECVD:
+ case SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD:
+ stopPolling();
+ /* fall through */
+ case SNAPSHOT_STATE_INITIALIZED:
+ case SNAPSHOT_STATE_BUF_INITIALIZED:
+ if (mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+ deinitSnapshotBuffers();
+ }else
+ {
+ deinitRawSnapshotBuffers();
+ }
+ /* fall through */
+ case SNAPSHOT_STATE_BUF_NOTIF_REGD:
+ case SNAPSHOT_STATE_CH_ACQUIRED:
+ if (mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+ deinitSnapshotChannel(MM_CAMERA_CH_SNAPSHOT);
+ }else
+ {
+ deinitSnapshotChannel(MM_CAMERA_CH_RAW);
+ }
+ /* fall through */
+ default:
+ /* Set the state to ERROR */
+ setSnapshotState(SNAPSHOT_STATE_ERROR);
+ break;
+ }
+
+ LOGD("%s: X", __func__);
+}
+
+// Advance the snapshot state machine (logged for debugging).
+void QCameraStream_Snapshot::setSnapshotState(int state)
+{
+ LOGD("%s: Setting snapshot state to: %d", __func__, state);
+ mSnapshotState = state;
+}
+
+// Current value of the snapshot state machine.
+int QCameraStream_Snapshot::getSnapshotState() { return mSnapshotState; }
+
+// Mark whether a (low-power) live snapshot is in progress.
+void QCameraStream_Snapshot::setModeLiveSnapshot(bool value) { mModeLiveSnapshot = value; }
+
+// True while a (low-power) live snapshot is in progress.
+bool QCameraStream_Snapshot::isLiveSnapshot(void) { return mModeLiveSnapshot; }
+// True when this stream was created in zero-shutter-lag mode.
+bool QCameraStream_Snapshot::isZSLMode()
+{
+ return (myMode & CAMERA_ZSL_MODE) != 0;
+}
+
+// Mark whether the next capture is a full-sensor-size liveshot.
+void QCameraStream_Snapshot::setFullSizeLiveshot(bool value) { mFullLiveshot = value; }
+
+// True when the capture in progress is a full-sensor-size liveshot.
+bool QCameraStream_Snapshot::isFullSizeLiveshot() { return mFullLiveshot; }
+
+/* Reset burst bookkeeping: how many JPEGs we expect this capture
+ (never fewer than one) and how many have arrived so far. */
+void QCameraStream_Snapshot::resetSnapshotCounters(void )
+{
+ int requested = mHalCamCtrl->getNumOfSnapshots();
+ mNumOfSnapshot = (requested > 0) ? requested : 1;
+ mNumOfRecievedJPEG = 0;
+ LOGD("%s: Number of images to be captured: %d", __func__, mNumOfSnapshot);
+}
+
+//------------------------------------------------------------------
+// Constructor and Destructor
+//------------------------------------------------------------------
+/* Construct the snapshot stream for the given camera/mode. Defaults
+ to JPEG snapshots with NV21 main and thumbnail formats, resets all
+ state-machine and buffer bookkeeping, initializes the pending-frame
+ and wavelet-denoise queues, and opens an OMX JPEG encoder session. */
+QCameraStream_Snapshot::
+QCameraStream_Snapshot(int cameraId, camera_mode_t mode)
+ : QCameraStream(cameraId,mode),
+ mSnapshotFormat(PICTURE_FORMAT_JPEG),
+ mPictureWidth(0), mPictureHeight(0),
+ mPictureFormat(CAMERA_YUV_420_NV21),
+ mPostviewWidth(0), mPostviewHeight(0),
+ mThumbnailWidth(0), mThumbnailHeight(0),
+ mThumbnailFormat(CAMERA_YUV_420_NV21),
+ mJpegOffset(0),
+ mSnapshotState(SNAPSHOT_STATE_UNINIT),
+ mNumOfSnapshot(1),
+ mModeLiveSnapshot(false),
+ mBurstModeFlag(false),
+ mActualPictureWidth(0),
+ mActualPictureHeight(0),
+ mJpegDownscaling(false),
+ mJpegHeap(NULL),
+ mDisplayHeap(NULL),
+ mPostviewHeap(NULL),
+ mCurrentFrameEncoded(NULL),
+ mJpegSessionId(0),
+ mFullLiveshot(false),
+ mDropThumbnail(false)
+ {
+ LOGV("%s: E", __func__);
+
+ /*initialize snapshot queue*/
+ mSnapshotQueue.init();
+
+ /*initialize WDN queue*/
+ mWDNQueue.init();
+ mIsDoingWDN = FALSE;
+
+ /* Zero out per-buffer bookkeeping for main and postview streams. */
+ memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+ memset(&mPostviewStreamBuf, 0, sizeof(mPostviewStreamBuf));
+ mSnapshotBufferNum = 0;
+ mMainSize = 0;
+ mThumbSize = 0;
+ for(int i = 0; i < mMaxSnapshotBufferCount; i++) {
+ mMainfd[i] = 0;
+ mThumbfd[i] = 0;
+ mCameraMemoryPtrMain[i] = NULL;
+ mCameraMemoryPtrThumb[i] = NULL;
+ }
+ /*load the jpeg lib*/
+ mJpegSessionId = omxJpegOpen( );
+ LOGV("%s: X", __func__);
+ }
+
+
+/* Tear down the snapshot stream: drain both frame queues, stop and
+ release the stream if still active, and close the OMX JPEG session
+ opened by the constructor. */
+QCameraStream_Snapshot::~QCameraStream_Snapshot() {
+ LOGV("%s: E", __func__);
+
+ /* deinit snapshot queue */
+ if (mSnapshotQueue.isInitialized()) {
+ mSnapshotQueue.deinit();
+ }
+ /* deinit wavelet-denoise (WDN) queue */
+ if (mWDNQueue.isInitialized()) {
+ mWDNQueue.deinit();
+ }
+
+ if(mActive) {
+ stop();
+ }
+ if(mInit) {
+ release();
+ }
+ mInit = false;
+ mActive = false;
+ /* Close the jpeg session opened in the constructor. */
+ if (mJpegSessionId > 0) {
+ omxJpegClose( );
+ mJpegSessionId = 0;
+ }
+ LOGV("%s: X", __func__);
+
+}
+
+//------------------------------------------------------------------
+// Public Members
+//------------------------------------------------------------------
+/* One-time init of the snapshot stream object. Rejects re-entry while
+ a snapshot is already in progress; in ZSL mode a second init is
+ benign and returns NO_ERROR. (Removed an unused op_mode local.) */
+status_t QCameraStream_Snapshot::init()
+{
+ status_t ret = NO_ERROR;
+
+ LOGV("%s: E", __func__);
+ /* Check the state. If we have already started snapshot
+ process just return*/
+ if (getSnapshotState() != SNAPSHOT_STATE_UNINIT) {
+ ret = isZSLMode() ? NO_ERROR : INVALID_OPERATION;
+ LOGE("%s: Trying to take picture while snapshot is in progress",
+ __func__);
+ goto end;
+ }
+ mInit = true;
+
+end:
+ /*if (ret == NO_ERROR) {
+ setSnapshotState(SNAPSHOT_STATE_INITIALIZED);
+ }*/
+ LOGV("%s: X", __func__);
+ return ret;
+}
+
+status_t QCameraStream_Snapshot::start(void) {
+ status_t ret = NO_ERROR;
+
+ LOGV("%s: E", __func__);
+
+ Mutex::Autolock lock(mStopCallbackLock);
+
+ /* Keep track of number of snapshots to take - in case of
+ multiple snapshot/burst mode */
+
+ if(mHalCamCtrl->isRawSnapshot()) {
+ LOGD("%s: Acquire Raw Snapshot Channel", __func__);
+ ret = cam_ops_ch_acquire(mCameraId, MM_CAMERA_CH_RAW);
+ if (NO_ERROR != ret) {
+ LOGE("%s: Failure Acquiring Raw Snapshot Channel error =%d\n",
+ __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ /* Snapshot channel is acquired */
+ setSnapshotState(SNAPSHOT_STATE_CH_ACQUIRED);
+ LOGD("%s: Register buffer notification. My object: %x",
+ __func__, (unsigned int) this);
+ (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_RAW,
+ snapshot_notify_cb,
+ MM_CAMERA_REG_BUF_CB_INFINITE,
+ 0,
+ this);
+ /* Set the state to buffer notification completed */
+ setSnapshotState(SNAPSHOT_STATE_BUF_NOTIF_REGD);
+ }else{
+ LOGD("%s: Acquire Snapshot Channel", __func__);
+ ret = cam_ops_ch_acquire(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+ if (NO_ERROR != ret) {
+ LOGE("%s: Failure Acquiring Snapshot Channel error =%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ /* Snapshot channel is acquired */
+ setSnapshotState(SNAPSHOT_STATE_CH_ACQUIRED);
+ LOGD("%s: Register buffer notification. My object: %x",
+ __func__, (unsigned int) this);
+ (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_SNAPSHOT,
+ snapshot_notify_cb,
+ MM_CAMERA_REG_BUF_CB_INFINITE,
+ 0,
+ this);
+ /* Set the state to buffer notification completed */
+ setSnapshotState(SNAPSHOT_STATE_BUF_NOTIF_REGD);
+ }
+
+ if (isZSLMode()) {
+ prepareHardware();
+ ret = initZSLSnapshot();
+ if(ret != NO_ERROR) {
+ LOGE("%s : Error while Initializing ZSL snapshot",__func__);
+ goto end;
+ }
+ mHalCamCtrl->setExifTags();
+ /* In case of ZSL, start will only start snapshot stream and
+ continuously queue the frames in a queue. When user clicks
+ shutter we'll call get buffer from the queue and pass it on */
+ ret = startStreamZSL();
+ goto end;
+ }
+
+ if (isFullSizeLiveshot())
+ ret = initFullLiveshot();
+
+ /* Check if it's a raw snapshot or JPEG*/
+ if(mHalCamCtrl->isRawSnapshot()) {
+ mSnapshotFormat = PICTURE_FORMAT_RAW;
+ ret = initRawSnapshot(mNumOfSnapshot);
+ }else{
+ //JPEG
+ mSnapshotFormat = PICTURE_FORMAT_JPEG;
+ ret = initJPEGSnapshot(mNumOfSnapshot);
+ }
+ if(ret != NO_ERROR) {
+ LOGE("%s : Error while Initializing snapshot",__func__);
+ goto end;
+ }
+
+ //Update Exiftag values.
+ mHalCamCtrl->setExifTags();
+
+ if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ ret = takePictureRaw();
+ goto end;
+ }
+ else{
+ ret = takePictureJPEG();
+ goto end;
+ }
+
+end:
+ if (ret == NO_ERROR) {
+ setSnapshotState(SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD);
+ mActive = true;
+ } else {
+ deInitBuffer();
+ }
+
+ LOGV("%s: X", __func__);
+ return ret;
+ }
+
+void QCameraStream_Snapshot::stopPolling(void)
+{
+ mm_camera_ops_type_t ops_type;
+
+ if (mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+ ops_type = isZSLMode() ? MM_CAMERA_OPS_ZSL : MM_CAMERA_OPS_SNAPSHOT;
+ }else
+ ops_type = MM_CAMERA_OPS_RAW;
+
+ if( NO_ERROR != cam_ops_action(mCameraId, FALSE,
+ ops_type, this)) {
+ LOGE("%s: Failure stopping snapshot", __func__);
+ }
+}
+
+void QCameraStream_Snapshot::stop(void)
+{
+ mm_camera_ops_type_t ops_type;
+ status_t ret = NO_ERROR;
+
+ LOGV("%s: E", __func__);
+ //Mutex::Autolock l(&snapshotLock);
+
+ if(!mActive) {
+ LOGV("%s: Not Active return now", __func__);
+ return;
+ }
+ mActive = false;
+ Mutex::Autolock lock(mStopCallbackLock);
+ if (getSnapshotState() != SNAPSHOT_STATE_UNINIT) {
+ /* Stop polling for further frames */
+ stopPolling();
+
+ if(getSnapshotState() == SNAPSHOT_STATE_JPEG_ENCODING) {
+ LOGV("Destroy Jpeg Instance");
+ omxJpegAbort();
+ }
+
+ /* Depending upon current state, we'll need to allocate-deallocate-deinit*/
+ deInitBuffer();
+ }
+
+ if(mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_RAW);
+ if(ret != MM_CAMERA_OK) {
+ LOGE("%s:Deinit RAW channel failed=%d\n", __func__, ret);
+ }
+ (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_RAW,
+ NULL,
+ (mm_camera_register_buf_cb_type_t)NULL,
+ NULL,
+ NULL);
+ } else {
+ ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+ if(ret != MM_CAMERA_OK) {
+ LOGE("%s:Deinit Snapshot channel failed=%d\n", __func__, ret);
+ }
+ (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_SNAPSHOT,
+ NULL,
+ (mm_camera_register_buf_cb_type_t)NULL,
+ NULL,
+ NULL);
+ }
+
+ /* release is generally called in case of explicit call from
+ upper-layer during disconnect. So we need to deinit everything
+ whatever state we are in */
+ LOGV("Calling omxjpegjoin from release\n");
+ omxJpegFinish();
+#if 0
+ omxJpegClose();
+#endif
+ mFullLiveshot = false;
+ LOGV("%s: X", __func__);
+
+}
+
+void QCameraStream_Snapshot::release()
+{
+ status_t ret = NO_ERROR;
+ LOGV("%s: E", __func__);
+ //Mutex::Autolock l(&snapshotLock);
+
+ if(isLiveSnapshot()) {
+ deInitBuffer();
+ }
+ if(!mInit){
+ LOGE("%s : Stream not Initalized",__func__);
+ return;
+ }
+
+ if(mActive) {
+ this->stop();
+ mActive = FALSE;
+ }
+
+ /* release is generally called in case of explicit call from
+ upper-layer during disconnect. So we need to deinit everything
+ whatever state we are in */
+
+ //deinit();
+ mInit = false;
+ LOGV("%s: X", __func__);
+
+}
+
+void QCameraStream_Snapshot::prepareHardware()
+{
+ LOGV("%s: E", __func__);
+
+ /* Prepare snapshot*/
+ cam_ops_action(mCameraId,
+ TRUE,
+ MM_CAMERA_OPS_PREPARE_SNAPSHOT,
+ this);
+ LOGV("%s: X", __func__);
+}
+
+sp<IMemoryHeap> QCameraStream_Snapshot::getRawHeap() const
+{
+ return ((mDisplayHeap != NULL) ? mDisplayHeap->mHeap : NULL);
+}
+
+QCameraStream*
+QCameraStream_Snapshot::createInstance(int cameraId,
+ camera_mode_t mode)
+{
+
+ QCameraStream* pme = new QCameraStream_Snapshot(cameraId, mode);
+
+ return pme;
+}
+
+void QCameraStream_Snapshot::deleteInstance(QCameraStream *p)
+{
+ if (p){
+ p->release();
+ delete p;
+ p = NULL;
+ }
+}
+
+void QCameraStream_Snapshot::notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie)
+{
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb, jpgDataCb;
+ int rc = NO_ERROR;
+ mm_camera_ch_data_buf_t *frame = (mm_camera_ch_data_buf_t *)cookie;
+
+ LOGI("%s: WDN Done status (%d) received",__func__,status);
+ Mutex::Autolock lock(mStopCallbackLock);
+ if (frame == NULL) {
+ LOGE("%s: cookie is returned NULL", __func__);
+ } else {
+ // first unmapping the fds
+ mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, frame->snapshot.main.idx, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING);
+ mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, frame->snapshot.thumbnail.idx, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING);
+
+ // then do JPEG encoding
+ rc = encodeDisplayAndSave(frame, 0);
+ }
+
+ // send upperlayer callback for raw image (data or notify, not both)
+ if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+ notifyCb = mHalCamCtrl->mNotifyCb;
+ } else {
+ notifyCb = NULL;
+ }
+ if(mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ /* get picture failed. Give jpeg callback with NULL data
+ * to the application to restore to preview mode
+ */
+ jpgDataCb = mHalCamCtrl->mDataCb;
+ } else {
+ jpgDataCb = NULL;
+ }
+
+ // launch next WDN if there is more in WDN Queue
+ lauchNextWDenoiseFromQueue();
+
+ mStopCallbackLock.unlock();
+
+ if (rc != NO_ERROR)
+ {
+ LOGE("%s: Error while encoding/displaying/saving image", __func__);
+ if (frame) {
+ cam_evt_buf_done(mCameraId, frame);
+ }
+
+ if (dataCb) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+ if (jpgDataCb) {
+ jpgDataCb(CAMERA_MSG_COMPRESSED_IMAGE,
+ NULL, 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+
+ if (frame != NULL) {
+ free(frame);
+ }
+ }
+}
+
+void QCameraStream_Snapshot::lauchNextWDenoiseFromQueue()
+{
+ do {
+ mm_camera_ch_data_buf_t *frame = NULL;
+ if ( mWDNQueue.isEmpty() ||
+ (NULL == (frame = (mm_camera_ch_data_buf_t *)mWDNQueue.dequeue())) ) {
+ // set the flag back to FALSE when no WDN going on
+ mIsDoingWDN = FALSE;
+ break;
+ }
+
+ if ( NO_ERROR != doWaveletDenoise(frame) ) {
+ LOGE("%s: Error while doing wavelet denoise", __func__);
+ if (frame != NULL) {
+ free(frame);
+ }
+ } else {
+ // we sent out req for WDN, so we can break here
+ LOGD("%s: Send out req for doing wavelet denoise, return here", __func__);
+ break;
+ }
+ } while (TRUE);
+}
+
+status_t QCameraStream_Snapshot::doWaveletDenoise(mm_camera_ch_data_buf_t* frame)
+{
+ status_t ret = NO_ERROR;
+ cam_sock_packet_t packet;
+ cam_ctrl_dimension_t dim;
+
+ LOGD("%s: E", __func__);
+
+ // get dim on the fly
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (NO_ERROR != ret) {
+ LOGE("%s: error - can't get dimension!", __func__);
+ return FAILED_TRANSACTION;
+ }
+
+ // send main frame mapping through domain socket
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN,
+ frame->snapshot.main.idx,
+ frame->snapshot.main.frame->fd,
+ dim.picture_frame_offset.frame_len, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ LOGE("%s: sending main frame mapping buf msg Failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ // send thumbnail frame mapping through domain socket
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL,
+ frame->snapshot.thumbnail.idx,
+ frame->snapshot.thumbnail.frame->fd,
+ dim.display_frame_offset.frame_len, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ LOGE("%s: sending thumbnail frame mapping buf msg Failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ // ask daemon to start wdn operation
+ if (NO_ERROR != sendWDenoiseStartMsg(frame)) {
+ LOGE("%s: sending thumbnail frame mapping buf msg Failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+end:
+ LOGD("%s: X", __func__);
+ return ret;
+}
+
+status_t QCameraStream_Snapshot::sendWDenoiseStartMsg(mm_camera_ch_data_buf_t * frame)
+{
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_SOCK_MSG_TYPE_WDN_START;
+ packet.payload.wdn_start.cookie = (unsigned long)frame;
+ packet.payload.wdn_start.num_frames = MM_MAX_WDN_NUM;
+ packet.payload.wdn_start.ext_mode[0] = MSM_V4L2_EXT_CAPTURE_MODE_MAIN;
+ packet.payload.wdn_start.ext_mode[1] = MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL;
+ packet.payload.wdn_start.frame_idx[0] = frame->snapshot.main.idx;
+ packet.payload.wdn_start.frame_idx[1] = frame->snapshot.thumbnail.idx;
+ if ( cam_ops_sendmsg(mCameraId, &packet, sizeof(packet), 0) <= 0 ) {
+ LOGE("%s: sending start wavelet denoise msg failed", __func__);
+ return FAILED_TRANSACTION;
+ }
+ return NO_ERROR;
+}
+
+}; // namespace android
+
diff --git a/camera/QCamera/HAL/core/src/QCameraStream.cpp b/camera/QCamera/HAL/core/src/QCameraStream.cpp
new file mode 100644
index 0000000..9196852
--- /dev/null
+++ b/camera/QCamera/HAL/core/src/QCameraStream.cpp
@@ -0,0 +1,364 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG __FILE__
+#include <utils/Log.h>
+#include <utils/threads.h>
+
+
+#include "QCameraStream.h"
+
+/* QCameraStream class implementation goes here*/
+/* following code implement the control logic of this class*/
+
+namespace android {
+
+StreamQueue::StreamQueue(){
+ mInitialized = false;
+}
+
+StreamQueue::~StreamQueue(){
+ flush();
+}
+
+void StreamQueue::init(){
+ Mutex::Autolock l(&mQueueLock);
+ mInitialized = true;
+ mQueueWait.signal();
+}
+
+void StreamQueue::deinit(){
+ Mutex::Autolock l(&mQueueLock);
+ mInitialized = false;
+ mQueueWait.signal();
+}
+
+bool StreamQueue::isInitialized(){
+ Mutex::Autolock l(&mQueueLock);
+ return mInitialized;
+}
+
+bool StreamQueue::enqueue(
+ void * element){
+ Mutex::Autolock l(&mQueueLock);
+ if(mInitialized == false)
+ return false;
+
+ mContainer.add(element);
+ mQueueWait.signal();
+ return true;
+}
+
+bool StreamQueue::isEmpty(){
+ return (mInitialized && mContainer.isEmpty());
+}
+void* StreamQueue::dequeue(){
+
+ void *frame;
+ mQueueLock.lock();
+ while(mInitialized && mContainer.isEmpty()){
+ mQueueWait.wait(mQueueLock);
+ }
+
+ if(!mInitialized){
+ mQueueLock.unlock();
+ return NULL;
+ }
+
+ frame = mContainer.itemAt(0);
+ mContainer.removeAt(0);
+ mQueueLock.unlock();
+ return frame;
+}
+
+void StreamQueue::flush(){
+ Mutex::Autolock l(&mQueueLock);
+ mContainer.clear();
+}
+
+
+// ---------------------------------------------------------------------------
+// QCameraStream
+// ---------------------------------------------------------------------------
+
+/* initialize a streaming channel*/
+status_t QCameraStream::initChannel(int cameraId,
+ uint32_t ch_type_mask)
+{
+#if 0
+ int rc = MM_CAMERA_OK;
+ int i;
+ status_t ret = NO_ERROR;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ cam_ctrl_dimension_t dim;
+ mm_camera_ch_image_fmt_parm_t fmt;
+
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ rc = cam_config_get_parm(cameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("%s: error - can't get camera dimension!", __func__);
+ LOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }
+
+ if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask) {
+ rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_PREVIEW);
+ LOGV("%s:ch_acquire MM_CAMERA_CH_PREVIEW, rc=%d\n",__func__, rc);
+
+ if(MM_CAMERA_OK != rc) {
+ LOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+ LOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }
+ else{
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_PREVIEW;
+ fmt.def.fmt = CAMERA_YUV_420_NV12; //dim.prev_format;
+ fmt.def.dim.width = dim.display_width;
+ fmt.def.dim.height = dim.display_height;
+ LOGV("%s: preview channel fmt = %d", __func__,
+ dim.prev_format);
+ LOGV("%s: preview channel resolution = %d X %d", __func__,
+ dim.display_width, dim.display_height);
+
+ rc = cam_config_set_parm(cameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ LOGV("%s: preview MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ LOGE("%s:set preview channel format err=%d\n", __func__, ret);
+ LOGE("%s: X", __func__);
+ ret = BAD_VALUE;
+ }
+ }
+ }
+
+
+ if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask)
+ {
+ rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_VIDEO);
+ LOGV("%s:ch_acquire MM_CAMERA_CH_VIDEO, rc=%d\n",__func__, rc);
+
+ if(MM_CAMERA_OK != rc) {
+ LOGE("%s: video channel acquir error =%d\n", __func__, rc);
+ LOGE("%s: X", __func__);
+ ret = BAD_VALUE;
+ }
+ else {
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_VIDEO;
+ fmt.video.video.fmt = CAMERA_YUV_420_NV12; //dim.enc_format;
+ fmt.video.video.dim.width = dim.video_width;
+ fmt.video.video.dim.height = dim.video_height;
+ LOGV("%s: video channel fmt = %d", __func__,
+ dim.enc_format);
+ LOGV("%s: video channel resolution = %d X %d", __func__,
+ dim.video_width, dim.video_height);
+
+ rc = cam_config_set_parm(cameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+
+ LOGV("%s: video MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ LOGE("%s:set video channel format err=%d\n", __func__, rc);
+ LOGE("%s: X", __func__);
+ ret= BAD_VALUE;
+ }
+ }
+
+ } /*MM_CAMERA_CH_VIDEO*/
+#endif
+
+ int rc = MM_CAMERA_OK;
+ status_t ret = NO_ERROR;
+ mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_VIDEO;
+ int i;
+
+ LOGV("QCameraStream::initChannel : E");
+ if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask){
+ rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_PREVIEW);
+ LOGV("%s:ch_acquire MM_CAMERA_CH_PREVIEW, rc=%d\n",__func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ LOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+ LOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }
+ /*Callback register*/
+ /* register a notify into the mm_camera_t object*/
+ /* ret = cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+ preview_notify_cb,
+ this);
+ LOGV("Buf notify MM_CAMERA_CH_PREVIEW, rc=%d\n",rc);*/
+ }else if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask){
+ rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_VIDEO);
+ LOGV("%s:ch_acquire MM_CAMERA_CH_VIDEO, rc=%d\n",__func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ LOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+ LOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }
+ /*Callback register*/
+ /* register a notify into the mm_camera_t object*/
+ /*ret = cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+ record_notify_cb,
+ this);
+ LOGV("Buf notify MM_CAMERA_CH_VIDEO, rc=%d\n",rc);*/
+ }
+
+ ret = (MM_CAMERA_OK==rc)? NO_ERROR : BAD_VALUE;
+ LOGV("%s: X, ret = %d", __func__, ret);
+ return ret;
+}
+
+status_t QCameraStream::deinitChannel(int cameraId,
+ mm_camera_channel_type_t ch_type)
+{
+
+ int rc = MM_CAMERA_OK;
+
+ LOGV("%s: E, channel = %d\n", __func__, ch_type);
+
+ if (MM_CAMERA_CH_MAX <= ch_type) {
+ LOGE("%s: X: BAD_VALUE", __func__);
+ return BAD_VALUE;
+ }
+
+ cam_ops_ch_release(cameraId, ch_type);
+
+ LOGV("%s: X, channel = %d\n", __func__, ch_type);
+ return NO_ERROR;
+}
+
+status_t QCameraStream::setMode(int enable) {
+ LOGE("%s :myMode %x ", __func__, myMode);
+ if (enable) {
+ myMode = (camera_mode_t)(myMode | CAMERA_ZSL_MODE);
+ } else {
+ myMode = (camera_mode_t)(myMode & ~CAMERA_ZSL_MODE);
+ }
+ return NO_ERROR;
+}
+
+status_t QCameraStream::setFormat(uint8_t ch_type_mask)
+{
+ int rc = MM_CAMERA_OK;
+ status_t ret = NO_ERROR;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ cam_ctrl_dimension_t dim;
+ mm_camera_ch_image_fmt_parm_t fmt;
+ int preview_format;
+ LOGE("%s: E",__func__);
+
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ rc = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != rc) {
+ LOGE("%s: error - can't get camera dimension!", __func__);
+ LOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }
+ char mDeviceName[PROPERTY_VALUE_MAX];
+ property_get("ro.product.device",mDeviceName," ");
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask){
+ fmt.ch_type = MM_CAMERA_CH_PREVIEW;
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_PREVIEW_FORMAT, &preview_format);
+ fmt.def.fmt = (cam_format_t)preview_format;
+ fmt.def.dim.width = dim.display_width;
+ fmt.def.dim.height = dim.display_height;
+ }else if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask){
+ fmt.ch_type = MM_CAMERA_CH_VIDEO;
+ fmt.video.video.fmt = CAMERA_YUV_420_NV21; //dim.enc_format;
+ fmt.video.video.dim.width = dim.video_width;
+ fmt.video.video.dim.height = dim.video_height;
+ }/*else if(MM_CAMERA_CH_SNAPSHOT_MASK & ch_type_mask){
+ if(mHalCamCtrl->isRawSnapshot()) {
+ fmt.ch_type = MM_CAMERA_CH_RAW;
+ fmt.def.fmt = CAMERA_BAYER_SBGGR10;
+ fmt.def.dim.width = dim.raw_picture_width;
+ fmt.def.dim.height = dim.raw_picture_height;
+ }else{
+ //Jpeg???
+ fmt.ch_type = MM_CAMERA_CH_SNAPSHOT;
+ fmt.snapshot.main.fmt = dim.main_img_format;
+ fmt.snapshot.main.dim.width = dim.picture_width;
+ fmt.snapshot.main.dim.height = dim.picture_height;
+
+ fmt.snapshot.thumbnail.fmt = dim.thumb_format;
+ fmt.snapshot.thumbnail.dim.width = dim.ui_thumbnail_width;
+ fmt.snapshot.thumbnail.dim.height = dim.ui_thumbnail_height;
+ }
+ }*/
+
+ rc = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ LOGV("%s: Stream MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ LOGE("%s:set stream channel format err=%d\n", __func__, ret);
+ LOGE("%s: X", __func__);
+ ret = BAD_VALUE;
+ }
+ LOGE("%s: X",__func__);
+ return ret;
+}
+
+QCameraStream::QCameraStream (){
+ mInit = false;
+ mActive = false;
+ /* memset*/
+ memset(&mCrop, 0, sizeof(mm_camera_ch_crop_t));
+}
+
+QCameraStream::QCameraStream (int cameraId, camera_mode_t mode)
+ :mCameraId(cameraId),
+ myMode(mode)
+{
+ mInit = false;
+ mActive = false;
+
+ /* memset*/
+ memset(&mCrop, 0, sizeof(mm_camera_ch_crop_t));
+}
+
+QCameraStream::~QCameraStream () {;}
+
+
+status_t QCameraStream::init() {
+ return NO_ERROR;
+}
+
+status_t QCameraStream::start() {
+ return NO_ERROR;
+}
+
+void QCameraStream::stop() {
+ return;
+}
+
+void QCameraStream::release() {
+ return;
+}
+
+void QCameraStream::setHALCameraControl(QCameraHardwareInterface* ctrl) {
+
+ /* provide a frame data consumer for the queue monitor
+ thread to call when the busy queue is not empty*/
+ mHalCamCtrl = ctrl;
+}
+
+}; // namespace android
diff --git a/camera/QCamera/HAL/test/Android.mk b/camera/QCamera/HAL/test/Android.mk
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/camera/QCamera/HAL/test/Android.mk
@@ -0,0 +1 @@
+
diff --git a/camera/QCamera/HAL/wrapper/QualcommCamera.cpp b/camera/QCamera/HAL/wrapper/QualcommCamera.cpp
new file mode 100644
index 0000000..cd7ac0b
--- /dev/null
+++ b/camera/QCamera/HAL/wrapper/QualcommCamera.cpp
@@ -0,0 +1,506 @@
+/* Copyright (c) 2011, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+/*#error uncomment this for compiler test!*/
+
+//#define LOG_NDEBUG 0
+#define LOG_NIDEBUG 0
+#define LOG_TAG "QualcommCamera"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+/* include QCamera Hardware Interface Header*/
+#include "QualcommCamera.h"
+//#include "QualcommCameraHardware.h"
+//#include <camera/CameraHardwareInterface.h>
+
+extern "C" {
+#include <sys/time.h>
+}
+
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+
+static hw_module_methods_t camera_module_methods = {
+ open: camera_device_open,
+};
+
+static hw_module_t camera_common = {
+ tag: HARDWARE_MODULE_TAG,
+ version_major: 0,
+ version_minor: 01,
+ id: CAMERA_HARDWARE_MODULE_ID,
+ name: "Qcamera",
+ author:"Qcom",
+ methods: &camera_module_methods,
+ dso: NULL,
+ //reserved[0]: 0,
+};
+camera_module_t HAL_MODULE_INFO_SYM = {
+ common: camera_common,
+ get_number_of_cameras: get_number_of_cameras,
+ get_camera_info: get_camera_info,
+};
+
+camera_device_ops_t camera_ops = {
+ set_preview_window: android::set_preview_window,
+ set_callbacks: android::set_CallBacks,
+ enable_msg_type: android::enable_msg_type,
+ disable_msg_type: android::disable_msg_type,
+ msg_type_enabled: android::msg_type_enabled,
+
+ start_preview: android::start_preview,
+ stop_preview: android::stop_preview,
+ preview_enabled: android::preview_enabled,
+ store_meta_data_in_buffers: android::store_meta_data_in_buffers,
+
+ start_recording: android::start_recording,
+ stop_recording: android::stop_recording,
+ recording_enabled: android::recording_enabled,
+ release_recording_frame: android::release_recording_frame,
+
+ auto_focus: android::auto_focus,
+ cancel_auto_focus: android::cancel_auto_focus,
+
+ take_picture: android::take_picture,
+ cancel_picture: android::cancel_picture,
+
+ set_parameters: android::set_parameters,
+ get_parameters: android::get_parameters,
+ put_parameters: android::put_parameters,
+ send_command: android::send_command,
+
+ release: android::release,
+ dump: android::dump,
+};
+
+namespace android {
+
+typedef struct {
+ camera_device hw_dev;
+ //sp<CameraHardwareInterface> hardware;
+ QCameraHardwareInterface *hardware;
+ int camera_released;
+ int cameraId;
+ //CameraParameters parameters;
+} camera_hardware_t;
+
+typedef struct {
+ camera_memory_t mem;
+ int32_t msgType;
+ sp<IMemory> dataPtr;
+ void* user;
+ unsigned int index;
+} q_cam_memory_t;
+
+QCameraHardwareInterface *util_get_Hal_obj( struct camera_device * device)
+{
+ QCameraHardwareInterface *hardware = NULL;
+ if(device && device->priv){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ hardware = camHal->hardware;
+ }
+ return hardware;
+}
+
+#if 0 //mzhu
+CameraParameters* util_get_HAL_parameter( struct camera_device * device)
+{
+ CameraParameters *param = NULL;
+ if(device && device->priv){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ param = &(camHal->parameters);
+ }
+ return param;
+}
+#endif //mzhu
+
+extern "C" int get_number_of_cameras()
+{
+ /* try to query every time we get the call!*/
+
+ LOGE("Q%s: E", __func__);
+ return android::HAL_getNumberOfCameras( );
+}
+
+extern "C" int get_camera_info(int camera_id, struct camera_info *info)
+{
+ int rc = -1;
+ LOGE("Q%s: E", __func__);
+ if(info) {
+ struct CameraInfo camInfo;
+ memset(&camInfo, -1, sizeof (struct CameraInfo));
+ android::HAL_getCameraInfo(camera_id, &camInfo);
+ if (camInfo.facing >= 0) {
+ rc = 0;
+ info->facing = camInfo.facing;
+ info->orientation = camInfo.orientation;
+ }
+ }
+ LOGV("Q%s: X", __func__);
+ return rc;
+}
+
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" int camera_device_open(
+ const struct hw_module_t* module, const char* id,
+ struct hw_device_t** hw_device)
+{
+ int rc = -1;
+ int mode = 0; // TODO: need to add 3d/2d mode, etc
+ camera_device *device = NULL;
+ if(module && id && hw_device) {
+ int cameraId = atoi(id);
+
+ if (!strcmp(module->name, camera_common.name)) {
+ camera_hardware_t *camHal =
+ (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
+ if(!camHal) {
+ *hw_device = NULL;
+ LOGE("%s: end in no mem", __func__);
+ return rc;
+ }
+ /* we have the camera_hardware obj malloced */
+ memset(camHal, 0, sizeof (camera_hardware_t));
+ camHal->hardware = new QCameraHardwareInterface(cameraId, mode); //HAL_openCameraHardware(cameraId);
+ if (camHal->hardware && camHal->hardware->isCameraReady()) {
+ camHal->cameraId = cameraId;
+ device = &camHal->hw_dev;
+ device->common.close = close_camera_device;
+ device->ops = &camera_ops;
+ device->priv = (void *)camHal;
+ rc = 0;
+ } else {
+ if (camHal->hardware) {
+ delete camHal->hardware;
+ camHal->hardware = NULL;
+ }
+ free(camHal);
+ device = NULL;
+ }
+ }
+ }
+ /* pass the actual hw_device ptr to the framework, so that the memberof() macro can actually be used */
+ *hw_device = (hw_device_t*)&device->common;
+ LOGE("%s: end rc %d", __func__, rc);
+ return rc;
+}
+
+extern "C" int close_camera_device( hw_device_t *hw_dev)
+{
+ LOGE("Q%s: device =%p E", __func__, hw_dev);
+ int rc = -1;
+ camera_device_t *device = (camera_device_t *)hw_dev;
+
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal ) {
+ QCameraHardwareInterface *hardware = util_get_Hal_obj( device);
+ if(!camHal->camera_released) {
+ if(hardware != NULL) {
+ hardware->release( );
+ }
+ }
+ if(hardware != NULL)
+ delete hardware;
+ free(camHal);
+ }
+ rc = 0;
+ }
+ return rc;
+}
+
+
+int set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+
+ if(hardware != NULL) {
+ rc = hardware->setPreviewWindow(window);
+ }
+ return rc;
+}
+
+void set_CallBacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ LOGE("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->setCallbacks(notify_cb,data_cb, data_cb_timestamp, get_memory, user);
+ }
+}
+
+void enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->enableMsgType(msg_type);
+ }
+}
+
+void disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ LOGE("Q%s: E", __func__);
+ if(hardware != NULL){
+ hardware->disableMsgType(msg_type);
+ }
+}
+
+int msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->msgTypeEnabled(msg_type);
+ }
+ return rc;
+}
+
+int start_preview(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->startPreview( );
+ }
+ LOGE("Q%s: X", __func__);
+ return rc;
+}
+
+void stop_preview(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->stopPreview( );
+ }
+}
+
+int preview_enabled(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->previewEnabled( );
+ }
+ return rc;
+}
+
+int store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->storeMetaDataInBuffers(enable);
+ }
+ return rc;
+}
+
+int start_recording(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->startRecording( );
+ }
+ return rc;
+}
+
+void stop_recording(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->stopRecording( );
+ }
+}
+
+int recording_enabled(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->recordingEnabled( );
+ }
+ return rc;
+}
+
+void release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ LOGV("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->releaseRecordingFrame(opaque);
+ }
+}
+
+int auto_focus(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->autoFocus( );
+ }
+ return rc;
+}
+
+int cancel_auto_focus(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->cancelAutoFocus( );
+ }
+ return rc;
+}
+
+int take_picture(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->takePicture( );
+ }
+ return rc;
+}
+
+int cancel_picture(struct camera_device * device)
+
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->cancelPicture( );
+ }
+ return rc;
+}
+
+int set_parameters(struct camera_device * device, const char *parms)
+
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL && parms){
+ //CameraParameters param;// = util_get_HAL_parameter(device);
+ //String8 str = String8(parms);
+
+ //param.unflatten(str);
+ rc = hardware->setParameters(parms);
+ //rc = 0;
+ }
+ return rc;
+}
+
+char* get_parameters(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ char *parms = NULL;
+ hardware->getParameters(&parms);
+ return parms;
+ }
+ return NULL;
+}
+
+void put_parameters(struct camera_device * device, char *parm)
+
+{
+ LOGE("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->putParameters(parm);
+ }
+}
+
+int send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->sendCommand( cmd, arg1, arg2);
+ }
+ return rc;
+}
+
+void release(struct camera_device * device)
+{
+ LOGE("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ hardware->release( );
+ camHal->camera_released = true;
+ }
+}
+
+int dump(struct camera_device * device, int fd)
+{
+ LOGE("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->dump( fd );
+ //rc = 0;
+ }
+ return rc;
+}
+
+}; // namespace android
diff --git a/camera/QCamera/HAL/wrapper/QualcommCamera.h b/camera/QCamera/HAL/wrapper/QualcommCamera.h
new file mode 100644
index 0000000..c7d4983
--- /dev/null
+++ b/camera/QCamera/HAL/wrapper/QualcommCamera.h
@@ -0,0 +1,106 @@
+/* Copyright (c) 2011, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+
+
+#include "QCameraHWI.h"
+
+extern "C" {
+ int get_number_of_cameras();
+ int get_camera_info(int camera_id, struct camera_info *info);
+
+ int camera_device_open(const struct hw_module_t* module, const char* id,
+ struct hw_device_t** device);
+
+ hw_device_t * open_camera_device(int cameraId);
+
+ int close_camera_device( hw_device_t *);
+
+namespace android {
+ int set_preview_window(struct camera_device *,
+ struct preview_stream_ops *window);
+ void set_CallBacks(struct camera_device *,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ void enable_msg_type(struct camera_device *, int32_t msg_type);
+
+ void disable_msg_type(struct camera_device *, int32_t msg_type);
+ int msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+ int start_preview(struct camera_device *);
+
+ void stop_preview(struct camera_device *);
+
+ int preview_enabled(struct camera_device *);
+ int store_meta_data_in_buffers(struct camera_device *, int enable);
+
+ int start_recording(struct camera_device *);
+
+ void stop_recording(struct camera_device *);
+
+ int recording_enabled(struct camera_device *);
+
+ void release_recording_frame(struct camera_device *,
+ const void *opaque);
+
+ int auto_focus(struct camera_device *);
+
+ int cancel_auto_focus(struct camera_device *);
+
+ int take_picture(struct camera_device *);
+
+ int cancel_picture(struct camera_device *);
+
+ int set_parameters(struct camera_device *, const char *parms);
+
+ char* get_parameters(struct camera_device *);
+
+ void put_parameters(struct camera_device *, char *);
+
+ int send_command(struct camera_device *,
+ int32_t cmd, int32_t arg1, int32_t arg2);
+
+ void release(struct camera_device *);
+
+ int dump(struct camera_device *, int fd);
+
+
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
+
diff --git a/camera/QCamera/stack/Android.mk b/camera/QCamera/stack/Android.mk
new file mode 100644
index 0000000..c7307a7
--- /dev/null
+++ b/camera/QCamera/stack/Android.mk
@@ -0,0 +1,4 @@
+LOCAL_PATH:= $(call my-dir)
+include $(LOCAL_PATH)/mm-camera-interface/Android.mk
+include $(LOCAL_PATH)/mm-camera-test/Android.mk
+#include $(LOCAL_PATH)/mm-jpeg-interface/Android.mk
diff --git a/camera/QCamera/stack/common/cam_list.h b/camera/QCamera/stack/common/cam_list.h
new file mode 100644
index 0000000..ea30d7f
--- /dev/null
+++ b/camera/QCamera/stack/common/cam_list.h
@@ -0,0 +1,74 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+/* This file is a mirrored copy from /vendor/qcom/proprietary/mm-camera/common,
+ * Please do not modify it directly here. */
+
+#ifndef __CAMLIST_H
+#define __CAMLIST_H
+
+#include <stddef.h>
+
+#define member_of(ptr, type, member) ({ \
+ const typeof(((type *)0)->member) *__mptr = (ptr); \
+ (type *)((char *)__mptr - offsetof(type,member));})
+
+struct cam_list {
+ struct cam_list *next, *prev;
+};
+
+static inline void cam_list_init(struct cam_list *ptr)
+{
+ ptr->next = ptr;
+ ptr->prev = ptr;
+}
+
+static inline void cam_list_add_tail_node(struct cam_list *item,
+ struct cam_list *head)
+{
+ struct cam_list *prev = head->prev;
+
+ head->prev = item;
+ item->next = head;
+ item->prev = prev;
+ prev->next = item;
+}
+
+static inline void cam_list_del_node(struct cam_list *ptr)
+{
+ struct cam_list *prev = ptr->prev;
+ struct cam_list *next = ptr->next;
+
+ next->prev = ptr->prev;
+ prev->next = ptr->next;
+ ptr->next = ptr;
+ ptr->prev = ptr;
+}
+
+#endif /* __CAMLIST_H */
diff --git a/camera/QCamera/stack/mm-camera-interface/Android.mk b/camera/QCamera/stack/mm-camera-interface/Android.mk
new file mode 100644
index 0000000..dd1b110
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/Android.mk
@@ -0,0 +1,47 @@
+#ifeq ($(call is-board-platform,msm8960),true)
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+USE_BIONIC_HEADER:=true
+
+include $(CLEAR_VARS)
+
+MM_CAM_FILES := \
+ src/mm_camera_interface.c \
+ src/mm_camera_stream.c \
+ src/mm_camera_channel.c \
+ src/mm_camera.c \
+ src/mm_camera_thread.c \
+ src/mm_camera_data.c \
+ src/mm_camera_sock.c \
+ src/mm_camera_helper.c
+
+LOCAL_CFLAGS += -D_ANDROID_
+LOCAL_COPY_HEADERS_TO := mm-camera-interface_badger
+LOCAL_COPY_HEADERS := inc/mm_camera_interface.h
+
+LOCAL_C_INCLUDES := \
+ $(LOCAL_PATH)/inc \
+ $(LOCAL_PATH)/../common \
+ $(LOCAL_PATH)/../../../
+
+ifneq ($(strip $(USE_BIONIC_HEADER)),true)
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include/media
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+endif
+
+LOCAL_C_INCLUDES+= hardware/qcom/media/mm-core/inc
+LOCAL_CFLAGS += -include bionic/libc/include/sys/socket.h
+LOCAL_CFLAGS += -include bionic/libc/include/sys/un.h
+
+LOCAL_SRC_FILES := $(MM_CAM_FILES)
+
+LOCAL_MODULE := libmmcamera_interface
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
+#endif
diff --git a/camera/QCamera/stack/mm-camera-interface/inc/mm_camera.h b/camera/QCamera/stack/mm-camera-interface/inc/mm_camera.h
new file mode 100644
index 0000000..c8c8311
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/inc/mm_camera.h
@@ -0,0 +1,621 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_H__
+#define __MM_CAMERA_H__
+
+#include <sys/poll.h>
+#include "mm_camera_interface.h"
+#include "cam_list.h"
+
+/**********************************************************************************
+* Data structure declarations
+***********************************************************************************/
+/* num of streams allowed in a channel obj */
+#define MM_CAMEAR_STRAEM_NUM_MAX 8
+/* num of channels allowed in a camera obj */
+#define MM_CAMERA_CHANNEL_MAX 1
+/* num of callbacks allowed for an event type */
+#define MM_CAMERA_EVT_ENTRY_MAX 4
+/* num of data callbacks allowed in a stream obj */
+#define MM_CAMERA_STREAM_BUF_CB_MAX 4
+/* num of data poll threads allowed in a channel obj */
+#define MM_CAMERA_CHANNEL_POLL_THREAD_MAX 1
+
+#define MM_CAMERA_DEV_NAME_LEN 32
+#define MM_CAMERA_DEV_OPEN_TRIES 2
+#define MM_CAMERA_DEV_OPEN_RETRY_SLEEP 20
+
+struct mm_channel;
+struct mm_stream;
+struct mm_camera_obj;
+
+/* common use */
+typedef struct {
+ struct cam_list list;
+ void* data;
+} mm_camera_q_node_t;
+
+typedef struct {
+ mm_camera_q_node_t head; /* dummy head */
+ uint32_t size;
+ pthread_mutex_t lock;
+} mm_camera_queue_t;
+
+typedef enum
+{
+ MM_CAMERA_ASYNC_CMD_TYPE_STOP, /* async stop */
+ MM_CAMERA_ASYNC_CMD_TYPE_MAX
+} mm_camera_async_cmd_type_t;
+
+typedef struct {
+ struct mm_channel* ch_obj;
+ uint8_t num_streams;
+ uint32_t stream_ids[MM_CAMEAR_STRAEM_NUM_MAX];
+} mm_camera_async_stop_cmd_t;
+
+typedef struct {
+ mm_camera_async_cmd_type_t cmd_type;
+ union {
+ mm_camera_async_stop_cmd_t stop_cmd;
+ } u;
+} mm_camera_async_cmd_t;
+
+typedef enum
+{
+    MM_CAMERA_CMD_TYPE_DATA_CB,    /* dataCB CMD */
+ MM_CAMERA_CMD_TYPE_EVT_CB, /* evtCB CMD */
+ MM_CAMERA_CMD_TYPE_ASYNC_CB, /* asyncCB CMD */
+ MM_CAMERA_CMD_TYPE_EXIT, /* EXIT */
+ MM_CAMERA_CMD_TYPE_REQ_DATA_CB,/* request data */
+ MM_CAMERA_CMD_TYPE_MAX
+} mm_camera_cmdcb_type_t;
+
+typedef struct {
+ uint32_t stream_id;
+ uint32_t frame_idx;
+ uint8_t need_pp; /* flag if pp needed on this buf */
+ mm_camera_buf_def_t *buf; /* ref to buf */
+} mm_camera_buf_info_t;
+
+typedef struct {
+ mm_camera_cmdcb_type_t cmd_type;
+ union {
+ mm_camera_buf_info_t buf; /* frame buf if dataCB */
+ mm_camera_event_t evt; /* evt if evtCB */
+ mm_camera_async_cmd_t async; /* async cmd */
+ } u;
+} mm_camera_cmdcb_t;
+
+typedef void (*mm_camera_cmd_cb_t)(mm_camera_cmdcb_t * cmd_cb, void* user_data);
+
+typedef struct {
+ mm_camera_queue_t cmd_queue; /* cmd queue (queuing dataCB, asyncCB, or exitCMD) */
+ pthread_t cmd_pid; /* cmd thread ID */
+ sem_t cmd_sem; /* semaphore for cmd thread */
+ mm_camera_cmd_cb_t cb; /* cb for cmd */
+ void* user_data; /* user_data for cb */
+} mm_camera_cmd_thread_t;
+
+typedef enum {
+ MM_CAMERA_POLL_TYPE_EVT,
+ MM_CAMERA_POLL_TYPE_CH,
+ MM_CAMERA_POLL_TYPE_MAX
+} mm_camera_poll_thread_type_t;
+
+/* function ptr defined for poll notify CB,
+ * registered at poll thread with poll fd */
+typedef void (*mm_camera_poll_notify_t)(void *user_data);
+
+typedef struct {
+ int32_t fd;
+ mm_camera_poll_notify_t notify_cb;
+ uint32_t handler;
+ void* user_data;
+} mm_camera_poll_entry_t;
+
+typedef struct {
+ mm_camera_poll_thread_type_t poll_type;
+ /* array to store poll fd and cb info
+ * for MM_CAMERA_POLL_TYPE_EVT, only index 0 is valid;
+ * for MM_CAMERA_POLL_TYPE_CH, depends on valid stream fd */
+ mm_camera_poll_entry_t poll_entries[MM_CAMEAR_STRAEM_NUM_MAX];
+ int32_t pfds[2];
+ pthread_t pid;
+ int32_t state;
+ int timeoutms;
+ uint32_t cmd;
+ struct pollfd poll_fds[MM_CAMEAR_STRAEM_NUM_MAX+1];
+ uint8_t num_fds;
+ pthread_mutex_t mutex;
+ pthread_cond_t cond_v;
+ int32_t status;
+ //void *my_obj;
+} mm_camera_poll_thread_t;
+
+/* mm_stream */
+typedef enum {
+ MM_STREAM_STATE_NOTUSED = 0, /* not used */
+ MM_STREAM_STATE_INITED, /* inited */
+ MM_STREAM_STATE_ACQUIRED, /* acquired, fd opened */
+ MM_STREAM_STATE_CFG, /* fmt & dim configured */
+ MM_STREAM_STATE_BUFFED, /* buf allocated */
+ MM_STREAM_STATE_REG, /* buf regged, stream off */
+ MM_STREAM_STATE_ACTIVE_STREAM_ON, /* active with stream on */
+ MM_STREAM_STATE_ACTIVE_STREAM_OFF, /* active with stream off */
+ MM_STREAM_STATE_MAX
+} mm_stream_state_type_t;
+
+typedef enum {
+ MM_STREAM_EVT_ACQUIRE,
+ MM_STREAM_EVT_RELEASE,
+ MM_STREAM_EVT_SET_FMT,
+ MM_STREAM_EVT_GET_BUF,
+ MM_STREAM_EVT_PUT_BUF,
+ MM_STREAM_EVT_REG_BUF,
+ MM_STREAM_EVT_UNREG_BUF,
+ MM_STREAM_EVT_START,
+ MM_STREAM_EVT_STOP,
+ MM_STREAM_EVT_QBUF,
+ MM_STREAM_EVT_SET_PARM,
+ MM_STREAM_EVT_GET_PARM,
+ MM_STREAM_EVT_MAX
+} mm_stream_evt_type_t;
+
+typedef struct {
+ mm_camera_buf_notify_t cb;
+ void *user_data;
+ /* cb_count = -1: infinite
+ * cb_count > 0: register only for required times */
+ int8_t cb_count;
+} mm_stream_data_cb_t;
+
+typedef struct {
+ /* buf reference count */
+ uint8_t buf_refcnt;
+
+ /* This flag is to indicate if after allocation,
+ * the corresponding buf needs to qbuf into kernel
+ * (e.g. for preview usecase, display needs to hold two bufs,
+ * so no need to qbuf these two bufs initially) */
+ uint8_t initial_reg_flag;
+
+ /* indicate if buf is in kernel(1) or client(0) */
+ uint8_t in_kernel;
+} mm_stream_buf_status_t;
+
+typedef struct mm_stream {
+ uint32_t my_hdl;
+ uint32_t inst_hdl;
+ int32_t fd;
+ mm_stream_state_type_t state;
+
+ /* ext_image_mode used as id for stream obj */
+ uint32_t ext_image_mode;
+
+ /* sensor index used */
+ uint32_t sensor_idx;
+
+ mm_camera_image_fmt_t fmt;
+
+ mm_camera_cmd_thread_t cmd_thread;
+
+ /* dataCB registered on this stream obj */
+ pthread_mutex_t cb_lock; /* cb lock to protect buf_cb */
+ mm_stream_data_cb_t buf_cb[MM_CAMERA_STREAM_BUF_CB_MAX];
+
+ /* stream buffer management */
+ pthread_mutex_t buf_lock;
+ uint8_t buf_num; /* num of buffers allocated */
+ mm_camera_buf_def_t* buf; /* ptr to buf array */
+ mm_stream_buf_status_t* buf_status; /* ptr to buf status array */
+
+ /* reference to parent channel_obj */
+ struct mm_channel* ch_obj;
+
+ uint8_t is_bundled; /* flag if stream is bundled */
+ uint8_t is_pp_needed; /* flag if need to do post processing, set when streamon */
+ uint8_t is_local_buf; /* flag if buf is local copy, no need to qbuf to kernel */
+ uint8_t hal_requested_num_bufs;
+ uint8_t need_stream_on; /* flag if stream need streamon when start */
+
+ mm_camera_frame_len_offset frame_offset; /*Stream buffer offset information*/
+} mm_stream_t;
+
+/* mm_channel */
+typedef enum {
+ MM_CHANNEL_STATE_NOTUSED = 0, /* not used */
+ MM_CHANNEL_STATE_STOPPED, /* stopped */
+ MM_CHANNEL_STATE_ACTIVE, /* active, at least one stream active */
+ MM_CHANNEL_STATE_PAUSED, /* paused */
+ MM_CHANNEL_STATE_MAX
+} mm_channel_state_type_t;
+
+typedef enum {
+ MM_CHANNEL_EVT_ADD_STREAM,
+ MM_CHANNEL_EVT_DEL_STREAM,
+ MM_CHANNEL_EVT_START_STREAM,
+ MM_CHANNEL_EVT_STOP_STREAM,
+ MM_CHANNEL_EVT_TEARDOWN_STREAM,
+ MM_CHANNEL_EVT_CONFIG_STREAM,
+ MM_CHANNEL_EVT_PAUSE,
+ MM_CHANNEL_EVT_RESUME,
+ MM_CHANNEL_EVT_INIT_BUNDLE,
+ MM_CHANNEL_EVT_DESTROY_BUNDLE,
+ MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+ MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+ MM_CHANNEL_EVT_START_FOCUS,
+ MM_CHANNEL_EVT_ABORT_FOCUS,
+ MM_CHANNEL_EVT_PREPARE_SNAPSHOT,
+ MM_CHANNEL_EVT_SET_STREAM_PARM,
+ MM_CHANNEL_EVT_GET_STREAM_PARM,
+ MM_CHANNEL_EVT_DELETE,
+ MM_CHANNEL_EVT_MAX
+} mm_channel_evt_type_t;
+
+typedef struct {
+ mm_camera_buf_notify_t buf_cb;
+ void *user_data;
+ uint32_t ext_image_mode;
+ uint32_t sensor_idx;
+} mm_evt_paylod_add_stream_t;
+
+typedef struct {
+ uint32_t stream_id;
+ mm_camera_stream_config_t *config;
+} mm_evt_paylod_config_stream_t;
+
+typedef struct {
+ mm_camera_stream_parm_t parm_type;
+ void *value;
+} mm_evt_paylod_stream_parm_t;
+
+typedef struct {
+ mm_camera_buf_notify_t super_frame_notify_cb;
+ void *user_data;
+ mm_camera_bundle_attr_t *attr;
+ uint8_t num_streams;
+ uint32_t *stream_ids;
+} mm_evt_payload_bundle_stream_t;
+
+typedef struct {
+ uint8_t num_streams;
+ uint32_t *stream_ids;
+} mm_evt_payload_start_stream_t;
+
+typedef struct {
+ uint8_t num_streams;
+ uint32_t *stream_ids;
+} mm_evt_payload_stop_stream_t;
+
+typedef struct {
+ uint32_t sensor_idx;
+ uint32_t focus_mode;
+} mm_evt_payload_start_focus_t;
+
+typedef struct {
+ uint8_t num_of_bufs;
+ mm_camera_buf_info_t super_buf[MM_CAMEAR_MAX_STRAEM_BUNDLE];
+ uint8_t matched;
+} mm_channel_queue_node_t;
+
+typedef struct {
+ mm_camera_queue_t que;
+ uint8_t num_streams;
+ /* container for bundled stream handlers */
+ uint32_t bundled_streams[MM_CAMEAR_MAX_STRAEM_BUNDLE];
+ mm_camera_bundle_attr_t attr;
+ uint32_t expected_frame_id;
+ uint32_t match_cnt;
+} mm_channel_queue_t;
+
+typedef struct {
+ /* queue to store bundled super buffers */
+ mm_channel_queue_t superbuf_queue;
+ mm_camera_buf_notify_t super_buf_notify_cb;
+ void *user_data;
+} mm_channel_bundle_t;
+
+typedef struct mm_channel {
+ uint32_t my_hdl;
+ mm_channel_state_type_t state;
+ pthread_mutex_t ch_lock; /* channel lock */
+
+ /* stream bundle info in the channel */
+ mm_channel_bundle_t bundle;
+
+    /* num of pending superbuffers */
+ uint32_t pending_cnt;
+ uint32_t pending_pp_cnt; /*pending cnt for post processing frames */
+
+ /* cmd thread for superbuffer dataCB and async stop*/
+ mm_camera_cmd_thread_t cmd_thread;
+
+ /* data poll thread
+ * currently one data poll thread per channel
+ * could extended to support one data poll thread per stream in the channel */
+ mm_camera_poll_thread_t poll_thread[MM_CAMERA_CHANNEL_POLL_THREAD_MAX];
+
+ /* container for all streams in channel
+ * stream is indexed by ext_image_mode */
+ mm_stream_t streams[MM_CAMEAR_STRAEM_NUM_MAX];
+
+ /* reference to parent cam_obj */
+ struct mm_camera_obj* cam_obj;
+} mm_channel_t;
+
+/* struct to store information about pp cookie*/
+typedef struct {
+ uint32_t cam_hdl;
+ uint32_t ch_hdl;
+ uint32_t stream_hdl;
+ mm_channel_queue_node_t* super_buf;
+} mm_channel_pp_info_t;
+
+/* mm_camera */
+typedef struct {
+ mm_camera_event_notify_t evt_cb;
+ void *user_data;
+} mm_camera_evt_entry_t;
+
+typedef struct {
+ mm_camera_evt_entry_t evt[MM_CAMERA_EVT_ENTRY_MAX];
+ /* reg_count <=0: infinite
+ * reg_count > 0: register only for required times */
+ int reg_count;
+} mm_camera_evt_obj_t;
+
+typedef struct mm_camera_obj {
+ uint32_t my_hdl;
+ int ref_count;
+ int32_t ctrl_fd;
+ int32_t ds_fd; /* domain socket fd */
+ cam_prop_t properties;
+ mm_camera_2nd_sensor_t second_sensor; /*second sensor info */
+ pthread_mutex_t cam_lock;
+ pthread_mutex_t cb_lock; /* lock for evt cb */
+ mm_channel_t ch[MM_CAMERA_CHANNEL_MAX];
+ mm_camera_evt_obj_t evt[MM_CAMERA_EVT_TYPE_MAX];
+ uint32_t evt_type_mask;
+ mm_camear_mem_vtbl_t *mem_vtbl; /* vtable for memory management */
+ mm_camera_poll_thread_t evt_poll_thread; /* evt poll thread */
+ mm_camera_cmd_thread_t evt_thread; /* thread for evt CB */
+ mm_camera_cmd_thread_t async_cmd_thread; /* thread for async cmd */
+ mm_camera_vtbl_t vtbl;
+
+ /* some local variables */
+ uint32_t snap_burst_num_by_user;
+ camera_mode_t current_mode;
+ uint8_t need_pp;
+ uint32_t op_mode;
+ cam_ctrl_dimension_t dim;
+} mm_camera_obj_t;
+
+typedef struct {
+ mm_camera_info_t camera[MSM_MAX_CAMERA_SENSORS];
+ int8_t num_cam;
+ char video_dev_name[MSM_MAX_CAMERA_SENSORS][MM_CAMERA_DEV_NAME_LEN];
+ mm_camera_obj_t *cam_obj[MSM_MAX_CAMERA_SENSORS];
+} mm_camera_ctrl_t;
+
+/**********************************************************************************
+* External function declarations
+***********************************************************************************/
+/* utility functions */
+/* set int32_t value */
+extern int32_t mm_camera_util_s_ctrl(int32_t fd,
+ uint32_t id,
+ int32_t value);
+
+extern int32_t mm_camera_util_private_s_ctrl( int32_t fd,
+ uint32_t id, void* value);
+
+/* get int32_t value */
+extern int32_t mm_camera_util_g_ctrl(int32_t fd,
+ uint32_t id,int32_t *value);
+
+/* send msg through domain socket for fd mapping */
+extern int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj,
+ void *msg,
+ uint32_t buf_size,
+ int sendfd);
+
+/* mm-camera */
+extern int32_t mm_camera_open(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_close(mm_camera_obj_t *my_obj);
+extern uint8_t mm_camera_is_event_supported(mm_camera_obj_t *my_obj,
+ mm_camera_event_type_t evt_type);
+extern int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data,
+ mm_camera_event_type_t evt_type);
+extern int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_buf_def_t *buf);
+extern mm_camera_2nd_sensor_t * mm_camera_query_2nd_sensor_info(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_sync(mm_camera_obj_t *my_obj);
+extern int32_t mm_camera_is_parm_supported(mm_camera_obj_t *my_obj,
+ mm_camera_parm_type_t parm_type,
+ uint8_t *support_set_parm,
+ uint8_t *support_get_parm);
+extern int32_t mm_camera_set_parm(mm_camera_obj_t *my_obj,
+ mm_camera_parm_type_t parm_type,
+ void* p_value);
+extern int32_t mm_camera_get_parm(mm_camera_obj_t *my_obj,
+ mm_camera_parm_type_t parm_type,
+ void* p_value);
+extern uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj);
+extern void mm_camera_del_channel(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_buf_notify_t buf_cb, void *user_data,
+ uint32_t ext_image_mode, uint32_t sensor_idx);
+extern int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id);
+extern int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config);
+extern int32_t mm_camera_bundle_streams(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ mm_camera_buf_notify_t super_frame_notify_cb,
+ void *user_data,
+ mm_camera_bundle_attr_t *attr,
+ uint8_t num_streams,
+ uint32_t *stream_ids);
+extern int32_t mm_camera_destroy_bundle(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_start_streams(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint8_t num_streams,
+ uint32_t *stream_ids);
+extern int32_t mm_camera_stop_streams(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint8_t num_streams,
+ uint32_t *stream_ids);
+extern int32_t mm_camera_async_teardown_streams(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint8_t num_streams,
+ uint32_t *stream_ids);
+extern int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj,
+ uint32_t ch_id);
+extern int32_t mm_camera_start_focus(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t sensor_idx,
+ uint32_t focus_mode);
+extern int32_t mm_camera_abort_focus(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t sensor_idx);
+extern int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t sensor_idx);
+extern int32_t mm_camera_set_stream_parm(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t s_id,
+ mm_camera_stream_parm_t parm_type,
+ void* p_value);
+
+extern int32_t mm_camera_get_stream_parm(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t s_id,
+ mm_camera_stream_parm_t parm_type,
+ void* p_value);
+
+extern int32_t mm_camera_register_event_notify_internal(
+ mm_camera_obj_t *my_obj,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data,
+ mm_camera_event_type_t evt_type);
+extern int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+ int ext_mode,
+ int idx,
+ int fd,
+ uint32_t size);
+extern int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+ int ext_mode,
+ int idx);
+extern int32_t mm_camera_send_ch_event(mm_camera_obj_t *my_obj,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_ch_event_type_t evt);
+
+
+/* mm_channel */
+extern int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+
+/* qbuf is a special case that does not go through the state machine.
+ * This is to avoid deadlock when trying to acquire ch_lock,
+ * from the context of dataCB, but async stop is holding ch_lock */
+extern int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+ mm_camera_buf_def_t *buf);
+
+/* Allow other stream to register dataCB at certain stream.
+ * This is for use case of video sized live snapshot,
+ * because snapshot stream need register one time CB at video stream.
+ * ext_image_mode and sensor_idx are used to identify the destination stream
+ * to be registered with dataCB. */
+extern int32_t mm_channel_reg_stream_cb(mm_channel_t *my_obj,
+ mm_stream_data_cb_t *cb,
+ uint32_t ext_image_mode,
+ uint32_t sensor_idx);
+
+/* mm_stream */
+extern int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+/* Allow other stream to register dataCB at certain stream.
+ * This is for use case of video sized live snapshot,
+ * because snapshot stream need register one time CB at video stream.
+ * ext_image_mode and sensor_idx are used to identify the destination stream
+ * to be registered with dataCB. */
+extern int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+ mm_stream_data_cb_t *val);
+
+/* utility functions declared in mm-camera-interface2.c
+ * and needed by mm-camera and the layers below */
+uint32_t mm_camera_util_generate_handler(uint8_t index);
+const char * mm_camera_util_get_dev_name(uint32_t cam_handler);
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler);
+
+/* queue functions */
+extern int32_t mm_camera_queue_init(mm_camera_queue_t* queue);
+extern int32_t mm_camera_queue_enq(mm_camera_queue_t* queue, void* node);
+extern void* mm_camera_queue_deq(mm_camera_queue_t* queue);
+extern int32_t mm_camera_queue_deinit(mm_camera_queue_t* queue);
+extern int32_t mm_camera_queue_flush(mm_camera_queue_t* queue);
+
+/* poll/cmd thread functions */
+extern int32_t mm_camera_poll_thread_launch(
+ mm_camera_poll_thread_t * poll_cb,
+ mm_camera_poll_thread_type_t poll_type);
+extern int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb);
+extern int32_t mm_camera_poll_thread_add_poll_fd(
+ mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler,
+ int32_t fd,
+ mm_camera_poll_notify_t nofity_cb,
+ void *userdata);
+extern int32_t mm_camera_poll_thread_del_poll_fd(
+ mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler);
+extern int32_t mm_camera_cmd_thread_launch(
+ mm_camera_cmd_thread_t * cmd_thread,
+ mm_camera_cmd_cb_t cb,
+ void* user_data);
+extern int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread);
+
+#endif /* __MM_CAMERA_H__ */
diff --git a/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_dbg.h b/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_dbg.h
new file mode 100644
index 0000000..e24b1a9
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_dbg.h
@@ -0,0 +1,70 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_DBG_H__
+#define __MM_CAMERA_DBG_H__
+
+#define LOG_DEBUG 1
+
+#ifndef LOG_DEBUG
+ #ifdef _ANDROID_
+ #undef LOG_NIDEBUG
+ #undef LOG_TAG
+ #define LOG_NIDEBUG 0
+ #define LOG_TAG "mm-libcamera2"
+ #include <utils/Log.h>
+ #else
+ #include <stdio.h>
+ #define ALOGE CDBG
+ #endif
+ #undef CDBG
+ #define CDBG(fmt, args...) do{}while(0)
+#else
+ #ifdef _ANDROID_
+ #undef LOG_NIDEBUG
+ #undef LOG_TAG
+ #define LOG_NIDEBUG 0
+ #define LOG_TAG "mm-libcamera2"
+ #include <utils/Log.h>
+ #define CDBG(fmt, args...) ALOGE(fmt, ##args)
+ #else
+ #include <stdio.h>
+ #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+ #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+ #endif
+#endif
+
+#ifdef _ANDROID_
+ #define CDBG_HIGH(fmt, args...) ALOGE(fmt, ##args)
+ #define CDBG_ERROR(fmt, args...) ALOGE(fmt, ##args)
+#else
+ #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+ #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_CAMERA_DBG_H__ */
diff --git a/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_interface.h b/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_interface.h
new file mode 100644
index 0000000..7946f28
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_interface.h
@@ -0,0 +1,342 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_INTERFACE_H__
+#define __MM_CAMERA_INTERFACE_H__
+#include <linux/ion.h>
+#include <linux/videodev2.h>
+#include <QCamera_Intf.h>
+
+#define MM_CAMERA_MAX_NUM_FRAMES 16
+/* NOTE(review): "CAMEAR"/"STRAEM" are misspellings of CAMERA/STREAM but the
+ * name is kept as-is — other translation units reference this macro. */
+#define MM_CAMEAR_MAX_STRAEM_BUNDLE 4
+#define MM_CAMERA_MAX_2ND_SENSORS 4
+
+/* White-balance presets exposed to HAL. */
+typedef enum {
+    MM_CAMERA_WHITE_BALANCE_AUTO,
+    MM_CAMERA_WHITE_BALANCE_OFF,
+    MM_CAMERA_WHITE_BALANCE_DAYLIGHT,
+    MM_CAMERA_WHITE_BALANCE_INCANDESCENT,
+    MM_CAMERA_WHITE_BALANCE_FLUORESCENT,
+    MM_CAMERA_WHITE_BALANCE_CLOUDY,
+    MM_CAMERA_WHITE_BALANCE_MAX
+} mm_camera_white_balance_mode_type_t;
+
+/* Per-stream parameters usable with set_stream_parm/get_stream_parm. */
+typedef enum {
+    MM_CAMERA_STREAM_OFFSET,
+    MM_CAMERA_STREAM_CID,
+    MM_CAMERA_STREAM_CROP
+}mm_camera_stream_parm_t;
+
+typedef struct {
+    int32_t width;
+    int32_t height;
+} mm_camera_dimension_t;
+
+/* Table of default sizes (sizes_tbl has tbl_size entries). */
+typedef struct{
+    uint32_t tbl_size;
+    struct camera_size_type *sizes_tbl;
+}default_sizes_tbl_t;
+
+typedef struct {
+    int32_t left;
+    int32_t top;
+    int32_t width;
+    int32_t height;
+} mm_camera_rect_t;
+
+/* TBD: meta header needs to move to msm_camera.h */
+typedef enum {
+    /* no meta data header used */
+    MM_CAMEAR_META_DATA_TYPE_DEF,
+    /* has focus flag, exposure idx,
+     * flash flag, etc. TBD */
+    MM_CAMEAR_META_DATA_TYPE_BRACKETING,
+    MM_CAMEAR_META_DATA_TYPE_MAX,
+} mm_camera_meta_header_t;
+
+/* Image format description for a stream. */
+typedef struct {
+    cam_format_t fmt;
+    mm_camera_meta_header_t meta_header;
+    uint32_t width;
+    uint32_t height;
+    uint8_t rotation;
+} mm_camera_image_fmt_t;
+
+typedef struct {
+    /* image format */
+    mm_camera_image_fmt_t fmt;
+
+    /* num of buffers needed */
+    uint8_t num_of_bufs;
+
+    /* flag to indicate if this stream need stream on */
+    uint8_t need_stream_on;
+} mm_camera_stream_config_t;
+
+/* Range/step capability description for the sharpness control. */
+typedef struct {
+    uint8_t name[32];
+    int32_t min_value;
+    int32_t max_value;
+    int32_t step;
+    int32_t default_value;
+} mm_camera_ctrl_cap_sharpness_t;
+
+typedef struct {
+    int16_t *zoom_ratio_tbl;
+    int32_t size;
+} mm_camera_zoom_tbl_t;
+
+typedef enum {
+    /* ZSL use case: get burst of frames */
+    MM_CAMERA_SUPER_BUF_NOTIFY_BURST = 0,
+    /* get continuous frames: when the super buf is
+     * ready dispatch it to HAL */
+    MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS,
+    MM_CAMERA_SUPER_BUF_NOTIFY_MAX
+}mm_camera_super_buf_notify_mode_t;
+
+typedef enum {
+    /* save the frame. No matter focused or not */
+    MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL = 0,
+    /* only queue the frame that is focused. Will enable
+     * meta data header to carry focus info*/
+    MM_CAMERA_SUPER_BUF_PRIORITY_FOCUS,
+    /* after shutter, only queue matched exposure index */
+    MM_CAMERA_SUPER_BUF_PRIORITY_EXPOSURE_BRACKETING,
+    MM_CAMERA_SUPER_BUF_PRIORITY_MAX
+}mm_camera_super_buf_priority_t;
+
+/* Attributes controlling how a super-buf bundle dispatches frames. */
+typedef struct {
+    mm_camera_super_buf_notify_mode_t notify_mode;
+    /* queue depth. Only for burst mode */
+    uint8_t water_mark;
+    /* look back how many frames from last buf */
+    uint8_t look_back;
+    /* after send first frame to HAL, how many frames
+     * needing to be skipped for next delivery? */
+    uint8_t post_frame_skip;
+    /* save matched priority frames only */
+    mm_camera_super_buf_priority_t priority;
+    /* in burst mode, how many super
+     * bufs for each shutter press? */
+    uint8_t burst_num;
+} mm_camera_bundle_attr_t;
+
+typedef struct {
+    uint8_t camera_id;            /* camera id */
+    qcamera_info_t camera_info;   /* position, mount_angle, etc. */
+    enum sensor_type_t main_sensor_type; /* BAYER, YUV, JPEG_SOC, etc. */
+    char *video_dev_name;         /* device node name, e.g. /dev/video1 */
+} mm_camera_info_t;
+
+typedef struct {
+    uint8_t cid;
+    uint8_t dt;
+}stream_cid_t;
+
+/* the stats struct will need rework after
+ * defining the stats buf sharing logic */
+typedef struct {
+    int type;
+    uint32_t length;
+    void *value;
+} mm_camera_stats_t;
+
+typedef struct {
+    uint32_t stream_id; /* stream handler */
+    int8_t buf_idx; /* buf idx within the stream bufs */
+
+    struct timespec ts; /* time stamp, to be filled when DQBUF*/
+    uint32_t frame_idx; /* frame sequence num, to be filled when DQBUF */
+
+    int8_t num_planes; /* num of planes, to be filled during mem allocation */
+    struct v4l2_plane planes[VIDEO_MAX_PLANES]; /* plane info, to be filled during mem allocation*/
+    int fd; /* fd of the frame, to be filled during mem allocation */
+    void* buffer; /* ptr to real frame buffer, to be filled during mem allocation */
+    uint32_t frame_len; /* len of the whole frame, to be filled during mem allocation */
+} mm_camera_buf_def_t;
+
+/* A bundle of matched buffers from one or more streams. */
+typedef struct {
+    uint32_t camera_handle;
+    uint32_t ch_id;
+    uint8_t num_bufs;
+    mm_camera_buf_def_t* bufs[MM_CAMEAR_MAX_STRAEM_BUNDLE];
+} mm_camera_super_buf_t;
+
+/* Callback for asynchronous camera events. */
+typedef void (*mm_camera_event_notify_t)(uint32_t camera_handle,
+                                         mm_camera_event_t *evt,
+                                         void *user_data);
+
+/* Callback for frame/super-buf delivery. */
+typedef void (*mm_camera_buf_notify_t) (mm_camera_super_buf_t *bufs,
+                                        void *user_data);
+typedef cam_frame_len_offset_t mm_camera_frame_len_offset;
+
+/* Memory-management vtable supplied by HAL: the stack calls get_buf/put_buf
+ * to allocate and release stream buffers. */
+typedef struct {
+  void *user_data;
+  int32_t (*get_buf) (uint32_t camera_handle,
+                      uint32_t ch_id, uint32_t stream_id,
+                      void *user_data,
+                      mm_camera_frame_len_offset *frame_offset_info,
+                      uint8_t num_bufs,
+                      uint8_t *initial_reg_flag,
+                      mm_camera_buf_def_t *bufs);
+  int32_t (*put_buf) (uint32_t camera_handle,
+                      uint32_t ch_id, uint32_t stream_id,
+                      void *user_data, uint8_t num_bufs,
+                      mm_camera_buf_def_t *bufs);
+} mm_camear_mem_vtbl_t;
+
+typedef struct {
+    uint8_t num_2nd_sensors;
+    uint8_t sensor_idxs[MM_CAMERA_MAX_2ND_SENSORS];
+} mm_camera_2nd_sensor_t;
+
+/* API call table: every op takes the camera handle returned by camera_open. */
+typedef struct {
+    int32_t (*sync) (uint32_t camera_handle);
+    uint8_t (*is_event_supported) (uint32_t camera_handle,
+                                   mm_camera_event_type_t evt_type);
+    int32_t (*register_event_notify) (uint32_t camera_handle,
+                                      mm_camera_event_notify_t evt_cb,
+                                      void * user_data,
+                                      mm_camera_event_type_t evt_type);
+    int32_t (*qbuf) (uint32_t camera_handle, uint32_t ch_id,
+                     mm_camera_buf_def_t *buf);
+    void (*camera_close) (uint32_t camera_handle);
+    /* Only for supporting advanced 2nd sensors. If no secondary sensor needed
+     * HAL can ignore this function */
+    mm_camera_2nd_sensor_t * (*query_2nd_sensor_info) (uint32_t camera_handle);
+    /* if the parm is supported: TRUE - support, FALSE - not support */
+    int32_t (*is_parm_supported) (uint32_t camera_handle,
+                                  mm_camera_parm_type_t parm_type,
+                                  uint8_t *support_set_parm,
+                                  uint8_t *support_get_parm);
+    /* set a parm's current value */
+    int32_t (*set_parm) (uint32_t camera_handle,
+                         mm_camera_parm_type_t parm_type,
+                         void* p_value);
+    /* get a parm's current value */
+    int32_t (*get_parm) (uint32_t camera_handle,
+                         mm_camera_parm_type_t parm_type,
+                         void* p_value);
+    /* ch_id returned, zero is invalid ch_id */
+    uint32_t (*ch_acquire) (uint32_t camera_handle);
+    /* release channel */
+    void (*ch_release) (uint32_t camera_handle, uint32_t ch_id);
+    /* return stream_id. zero is invalid stream_id
+     * default set to preview: ext_image_mode = 0
+     * default set to primary sensor: sensor_idx = 0
+     * value of ext_image_mode is defined in msm_camera.h
+     */
+    uint32_t (*add_stream) (uint32_t camera_handle, uint32_t ch_id,
+                            mm_camera_buf_notify_t buf_cb, void *user_data,
+                            uint32_t ext_image_mode, uint32_t sensor_idx);
+    /* delete stream */
+    int32_t (*del_stream) (uint32_t camera_handle, uint32_t ch_id,
+                           uint32_t stream_id);
+    /* set stream format. This will trigger getting bufs from HAL */
+    int32_t (*config_stream) (uint32_t camera_handle, uint32_t ch_id,
+                              uint32_t stream_id,
+                              mm_camera_stream_config_t *config);
+    /* setup super buf bundle for ZSL */
+    int32_t (*init_stream_bundle) (uint32_t camera_handle, uint32_t ch_id,
+                                   mm_camera_buf_notify_t super_frame_notify_cb,
+                                   void *user_data, mm_camera_bundle_attr_t *attr,
+                                   uint8_t num_streams, uint32_t *stream_ids);
+    /* remove the super buf bundle */
+    int32_t (*destroy_stream_bundle) (uint32_t camera_handle, uint32_t ch_id);
+    /* start streaming */
+    int32_t (*start_streams) (uint32_t camera_handle, uint32_t ch_id,
+                              uint8_t num_streams, uint32_t *stream_ids);
+    /* stop streaming */
+    int32_t (*stop_streams) (uint32_t camera_handle, uint32_t ch_id,
+                             uint8_t num_streams, uint32_t *stream_ids);
+    /* tear down streams asyncly */
+    int32_t (*async_teardown_streams) (uint32_t camera_handle, uint32_t ch_id,
+                                       uint8_t num_streams, uint32_t *stream_ids);
+    /* get super bufs. for burst mode only */
+    int32_t (*request_super_buf) (uint32_t camera_handle, uint32_t ch_id);
+    /* abort the super buf dispatching. for burst mode only */
+    int32_t (*cancel_super_buf_request) (uint32_t camera_handle,
+                                         uint32_t ch_id);
+    /* start focus: by default sensor_idx=0 */
+    int32_t (*start_focus) (uint32_t camera_handle,
+                            uint32_t ch_id,
+                            uint32_t sensor_idx,
+                            uint32_t focus_mode);
+    /* abort focus: by default sensor_idx=0 */
+    int32_t (*abort_focus) (uint32_t camera_handle,
+                            uint32_t ch_id,
+                            uint32_t sensor_idx);
+    /* prepare hardware will settle aec and flash.
+     * by default sensor_idx=0 */
+    int32_t (*prepare_snapshot) (uint32_t camera_handle,
+                                 uint32_t ch_id,
+                                 uint32_t sensor_idx);
+    /* set a parm's current value */
+    int32_t (*set_stream_parm) (uint32_t camera_handle,
+                                uint32_t ch_id,
+                                uint32_t s_id,
+                                mm_camera_stream_parm_t parm_type,
+                                void* p_value);
+    /* get a parm's current value */
+    int32_t (*get_stream_parm) (uint32_t camera_handle,
+                                uint32_t ch_id,
+                                uint32_t s_id,
+                                mm_camera_stream_parm_t parm_type,
+                                void* p_value);
+} mm_camera_ops_t;
+
+typedef struct {
+    uint32_t camera_handle;        /* camera object handle */
+    mm_camera_info_t *camera_info; /* reference pointer of camera info */
+    mm_camera_ops_t *ops;          /* API call table */
+} mm_camera_vtbl_t;
+
+/* Enumerate available cameras; *num_cameras receives the count. */
+mm_camera_info_t * camera_query(uint8_t *num_cameras);
+/* Open camera camera_idx with the HAL-provided memory vtable. */
+mm_camera_vtbl_t * camera_open(uint8_t camera_idx,
+                               mm_camear_mem_vtbl_t *mem_vtbl);
+
+typedef enum {
+    MM_CAMERA_PREVIEW,
+    MM_CAMERA_VIDEO,
+    MM_CAMERA_SNAPSHOT_MAIN,
+    MM_CAMERA_SNAPSHOT_THUMBNAIL,
+    MM_CAMERA_SNAPSHOT_RAW,
+    MM_CAMERA_RDI
+}mm_camera_img_mode;
+
+/* may remove later */
+typedef enum {
+    MM_CAMERA_OP_MODE_NOTUSED,
+    MM_CAMERA_OP_MODE_CAPTURE,
+    MM_CAMERA_OP_MODE_VIDEO,
+    MM_CAMERA_OP_MODE_ZSL,
+    MM_CAMERA_OP_MODE_MAX
+} mm_camera_op_mode_type_t;
+#endif /*__MM_CAMERA_INTERFACE_H__*/
diff --git a/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_sock.h b/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_sock.h
new file mode 100644
index 0000000..0846c05
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/inc/mm_camera_sock.h
@@ -0,0 +1,57 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_SOCKET_H__
+#define __MM_CAMERA_SOCKET_H__
+
+#include <inttypes.h>
+
+typedef enum {
+    MM_CAMERA_SOCK_TYPE_UDP,
+    MM_CAMERA_SOCK_TYPE_TCP,
+} mm_camera_sock_type_t;
+
+/* Create a domain socket for camera cam_id; returns fd, or negative on error.
+ * (presumably a UNIX domain socket named per cam_id — confirm in the .c) */
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type);
+
+/* Send msg (buf_size bytes) over fd; sendfd, when valid, is passed along
+ * as ancillary data so the peer can receive the descriptor. */
+int mm_camera_socket_sendmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int sendfd);
+
+/* Receive up to buf_size bytes into msg; *rcvdfd receives any fd passed
+ * as ancillary data. */
+int mm_camera_socket_recvmsg(
+  int fd,
+  void *msg,
+  uint32_t buf_size,
+  int *rcvdfd);
+
+/* Close a socket previously returned by mm_camera_socket_create. */
+void mm_camera_socket_close(int fd);
+
+#endif /*__MM_CAMERA_SOCKET_H__*/
+
+
diff --git a/camera/QCamera/stack/mm-camera-interface/src/mm_camera.c b/camera/QCamera/stack/mm-camera-interface/src/mm_camera.c
new file mode 100644
index 0000000..9038bd4
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/src/mm_camera.c
@@ -0,0 +1,1774 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+/* Set/test bit `parm` in a packed uint32 bitmask array (32 bits per word). */
+#define SET_PARM_BIT32(parm, parm_arr) \
+    (parm_arr[parm/32] |= (1<<(parm%32)))
+
+#define GET_PARM_BIT32(parm, parm_arr) \
+    ((parm_arr[parm/32]>>(parm%32))& 0x1)
+
+/* internal function declarations (defined later in this file) */
+int32_t mm_camera_send_native_ctrl_cmd(mm_camera_obj_t * my_obj,
+                                       cam_ctrl_type type,
+                                       uint32_t length,
+                                       void *value);
+int32_t mm_camera_send_native_ctrl_timeout_cmd(mm_camera_obj_t * my_obj,
+                                               cam_ctrl_type type,
+                                               uint32_t length,
+                                               void *value,
+                                               int timeout);
+int mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                      mm_camera_event_type_t evt_type,
+                      int reg_count);
+int32_t mm_camera_set_general_parm(mm_camera_obj_t * my_obj,
+                                   mm_camera_parm_type_t parm_type,
+                                   void* p_value);
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event);
+extern int32_t mm_channel_init(mm_channel_t *my_obj);
+
+/* Look up the channel object whose handle equals `handler` by linear scan of
+ * cam_obj->ch[]. Returns NULL when no channel matches. Caller must hold any
+ * lock required for cam_obj->ch[] access. */
+mm_channel_t * mm_camera_util_get_channel_by_handler(
+                                    mm_camera_obj_t * cam_obj,
+                                    uint32_t handler)
+{
+    int i;
+    mm_channel_t *ch_obj = NULL;
+
+    /* fix: removed unused local ch_idx (mm_camera_util_get_index_by_handler
+     * result was computed but never read) */
+    for(i = 0; i < MM_CAMERA_CHANNEL_MAX; i++) {
+        if (handler == cam_obj->ch[i].my_hdl) {
+            ch_obj = &cam_obj->ch[i];
+            break;
+        }
+    }
+    return ch_obj;
+}
+
+/* Event-thread worker: fan out one camera event to every registered app
+ * callback of that event type. Runs under cb_lock so registration changes
+ * (mm_camera_register_event_notify_internal) cannot race the iteration. */
+static void mm_camera_dispatch_app_event(mm_camera_cmdcb_t *cmd_cb,
+                                         void* user_data)
+{
+    int i;
+    mm_camera_event_type_t evt_type = cmd_cb->u.evt.event_type;
+    mm_camera_event_t *event = &cmd_cb->u.evt;
+    mm_camera_obj_t * my_obj = (mm_camera_obj_t *)user_data;
+    if (NULL != my_obj) {
+        /* guard against out-of-range types coming from the kernel event */
+        if (evt_type < MM_CAMERA_EVT_TYPE_MAX) {
+            pthread_mutex_lock(&my_obj->cb_lock);
+            for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+                if(my_obj->evt[evt_type].evt[i].evt_cb) {
+                    my_obj->evt[evt_type].evt[i].evt_cb(
+                        my_obj->my_hdl,
+                        event,
+                        my_obj->evt[evt_type].evt[i].user_data);
+                }
+            }
+            pthread_mutex_unlock(&my_obj->cb_lock);
+        }
+    }
+}
+
+/* Async-cmd-thread worker: currently handles only the deferred stream-stop
+ * command by forwarding a TEARDOWN_STREAM event into the channel FSM.
+ * Other command types are silently ignored. */
+static void mm_camera_handle_async_cmd(mm_camera_cmdcb_t *cmd_cb,
+                                       void* user_data)
+{
+    /* fix: removed unused local `int i` */
+    mm_camera_async_cmd_t *async_cmd = &cmd_cb->u.async;
+    mm_camera_obj_t * my_obj = NULL;
+    mm_evt_payload_stop_stream_t payload;
+
+    my_obj = (mm_camera_obj_t *)user_data;
+    if (NULL != my_obj) {
+        if (MM_CAMERA_ASYNC_CMD_TYPE_STOP == async_cmd->cmd_type) {
+            memset(&payload, 0, sizeof(mm_evt_payload_stop_stream_t));
+            payload.num_streams = async_cmd->u.stop_cmd.num_streams;
+            payload.stream_ids = async_cmd->u.stop_cmd.stream_ids;
+            mm_channel_fsm_fn(async_cmd->u.stop_cmd.ch_obj,
+                              MM_CHANNEL_EVT_TEARDOWN_STREAM,
+                              (void*)&payload,
+                              NULL);
+        }
+    }
+}
+
+/* Poll-thread callback: dequeue one V4L2 event from ctrl_fd and enqueue it
+ * to the event cmd thread. The driver error event is normalized into a
+ * MM_CAMERA_EVT_TYPE_CTRL/ERROR event; all other event payloads are passed
+ * through as received in ev.u.data (assumed to already be a valid
+ * mm_camera_event_t — TODO confirm against the kernel driver). */
+static void mm_camera_event_notify(void* user_data)
+{
+    struct v4l2_event ev;
+    int rc;
+    mm_camera_event_t *evt = NULL;
+    /* fix: removed unused local `mm_camera_cmdcb_t *node` (the node is
+     * allocated inside mm_camera_enqueue_evt, not here) */
+
+    mm_camera_obj_t *my_obj = (mm_camera_obj_t*)user_data;
+    if (NULL != my_obj) {
+        /* read evt */
+        memset(&ev, 0, sizeof(ev));
+        rc = ioctl(my_obj->ctrl_fd, VIDIOC_DQEVENT, &ev);
+        evt = (mm_camera_event_t *)ev.u.data;
+
+        if (rc >= 0) {
+            if(ev.type == V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT) {
+                evt->event_type = MM_CAMERA_EVT_TYPE_CTRL;
+                evt->e.ctrl.evt = MM_CAMERA_CTRL_EVT_ERROR;
+            }
+
+            mm_camera_enqueue_evt(my_obj, evt);
+        }
+    }
+}
+
+/* Copy `event` into a freshly allocated cmd node, enqueue it on the event
+ * cmd thread's queue and wake that thread. The node is owned (and freed)
+ * by the consumer thread. Returns 0 on success, -1 on allocation failure. */
+int32_t mm_camera_enqueue_evt(mm_camera_obj_t *my_obj,
+                              mm_camera_event_t *event)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t *node = NULL;
+
+    node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+    if (NULL != node) {
+        memset(node, 0, sizeof(mm_camera_cmdcb_t));
+        node->cmd_type = MM_CAMERA_CMD_TYPE_EVT_CB;
+        memcpy(&node->u.evt, event, sizeof(mm_camera_event_t));
+
+        /* enqueue to evt cmd thread */
+        mm_camera_queue_enq(&(my_obj->evt_thread.cmd_queue), node);
+        /* wake up evt cmd thread */
+        sem_post(&(my_obj->evt_thread.cmd_sem));
+    } else {
+        CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+        rc = -1;
+    }
+
+    return rc;
+}
+
+/* send local CH evt to HAL
+ * may not needed since we have return val for each channel/stream operation */
+int32_t mm_camera_send_ch_event(mm_camera_obj_t *my_obj,
+                                uint32_t ch_id,
+                                uint32_t stream_id,
+                                mm_camera_ch_event_type_t evt)
+{
+    int rc = 0;
+    mm_camera_event_t event;
+    event.event_type = MM_CAMERA_EVT_TYPE_CH;
+    event.e.ch.evt = evt;
+    /* TODO: need to change the ch evt struct to include ch_id and stream_id. */
+    /* NOTE(review): until then the single `ch` field carries stream_id,
+     * and the ch_id parameter is effectively unused here. */
+    event.e.ch.ch = stream_id;
+    CDBG("%s: stream on event, type=0x%x, ch=%d, evt=%d",
+         __func__, event.event_type, event.e.ch.ch, event.e.ch.evt);
+    rc = mm_camera_enqueue_evt(my_obj, &event);
+    return rc;
+}
+
+/* Open the camera object: open the v4l2 control node (with EIO retries),
+ * create the per-camera domain socket, select the mmap instance, sync
+ * capabilities from the backend, then launch the async-cmd, event-cmd and
+ * event-poll threads. Returns 0 on success, -1 on failure (fds are cleaned
+ * up on the error path). Caller holds intf_lock, so cam_lock is not
+ * unlocked here. */
+int32_t mm_camera_open(mm_camera_obj_t *my_obj)
+{
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+    int32_t rc = 0;
+    int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
+    uint8_t i;
+    uint8_t cam_idx = mm_camera_util_get_index_by_handler(my_obj->my_hdl);
+
+    CDBG("%s:  begin\n", __func__);
+
+    snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+             mm_camera_util_get_dev_name(my_obj->my_hdl));
+
+    /* retry open only on EIO; any other errno (or success) breaks out.
+     * NOTE(review): fd 0 is treated as failure (`> 0` tests) throughout —
+     * harmless in practice since stdin is open, but worth confirming. */
+    do{
+        n_try--;
+        my_obj->ctrl_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        CDBG("%s:  ctrl_fd = %d, errno == %d", __func__, my_obj->ctrl_fd, errno);
+        if((my_obj->ctrl_fd > 0) || (errno != EIO) || (n_try <= 0 )) {
+            CDBG_ERROR("%s:  opened, break out while loop", __func__);
+            break;
+        }
+        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__,sleep_msec);
+        usleep(sleep_msec*1000);
+    }while(n_try>0);
+
+    if (my_obj->ctrl_fd <= 0) {
+        CDBG_ERROR("%s: cannot open control fd of '%s' (%s)\n",
+                 __func__, dev_name, strerror(errno));
+        rc = -1;
+        goto on_error;
+    }
+
+    /* open domain socket*/
+    n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    do{
+        n_try--;
+        my_obj->ds_fd = mm_camera_socket_create(cam_idx, MM_CAMERA_SOCK_TYPE_UDP);
+        CDBG("%s:  ds_fd = %d, errno = %d", __func__, my_obj->ds_fd, errno);
+        if((my_obj->ds_fd > 0) || (n_try <= 0 )) {
+            CDBG("%s:  opened, break out while loop", __func__);
+            break;
+        }
+        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__,sleep_msec);
+        usleep(sleep_msec*1000);
+    }while(n_try>0);
+
+    if (my_obj->ds_fd <= 0) {
+        CDBG_ERROR("%s: cannot open domain socket fd of '%s'(%s)\n",
+                 __func__, dev_name, strerror(errno));
+        rc = -1;
+        goto on_error;
+    }
+
+    /* set ctrl_fd to be the mem_mapping fd */
+    rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                MSM_V4L2_PID_MMAP_INST, 0);
+    if (rc < 0) {
+        CDBG_ERROR("error: ioctl VIDIOC_S_CTRL MSM_V4L2_PID_MMAP_INST failed: %s\n",
+                   strerror(errno));
+        goto on_error;
+    }
+
+    /* set geo mode to 2D by default */
+    my_obj->current_mode = CAMERA_MODE_2D;
+
+    /* TODO:
+     * We need to remove function call once we consider sync as separate API
+     * and HAL needs to call after each camera open call.
+     */
+    mm_camera_sync(my_obj);
+
+    pthread_mutex_init(&my_obj->cb_lock, NULL);
+
+    CDBG("%s : Launch async cmd Thread in Cam Open",__func__);
+    mm_camera_cmd_thread_launch(&my_obj->async_cmd_thread,
+                                mm_camera_handle_async_cmd,
+                                (void *)my_obj);
+
+    CDBG("%s : Launch evt Thread in Cam Open",__func__);
+    mm_camera_cmd_thread_launch(&my_obj->evt_thread,
+                                mm_camera_dispatch_app_event,
+                                (void *)my_obj);
+
+    /* launch event poll thread
+     * we will add evt fd into event poll thread upon user first register for evt */
+    CDBG("%s : Launch evt Poll Thread in Cam Open",__func__);
+    mm_camera_poll_thread_launch(&my_obj->evt_poll_thread,
+                                 MM_CAMERA_POLL_TYPE_EVT);
+
+    CDBG("%s:  end (rc = %d)\n", __func__, rc);
+    /* we do not need to unlock cam_lock here before return
+     * because for open, it's done within intf_lock */
+    return rc;
+
+on_error:
+    if (my_obj->ctrl_fd > 0) {
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = -1;
+    }
+    if (my_obj->ds_fd > 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = -1;
+    }
+
+    /* we do not need to unlock cam_lock here before return
+     * because for open, it's done within intf_lock */
+    return rc;
+}
+
+/* Tear down the camera object: stop the poll/event/async threads (reverse
+ * of the launch order in mm_camera_open), close both fds, destroy cb_lock,
+ * then release cam_lock (which the caller acquired). Always returns 0. */
+int32_t mm_camera_close(mm_camera_obj_t *my_obj)
+{
+    CDBG("%s : Close evt Poll Thread in Cam Close",__func__);
+    mm_camera_poll_thread_release(&my_obj->evt_poll_thread);
+
+    CDBG("%s : Close evt cmd Thread in Cam Close",__func__);
+    mm_camera_cmd_thread_release(&my_obj->evt_thread);
+
+    CDBG("%s : Close asyn cmd Thread in Cam Close",__func__);
+    mm_camera_cmd_thread_release(&my_obj->async_cmd_thread);
+
+    if(my_obj->ctrl_fd > 0) {
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = -1;
+    }
+    if(my_obj->ds_fd > 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = -1;
+    }
+
+    pthread_mutex_destroy(&my_obj->cb_lock);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return 0;
+}
+
+/* Report whether an event type can be registered on this camera.
+ * Returns 1 for the supported types, 0 otherwise. my_obj is not consulted
+ * (kept for API symmetry); no locking required. */
+uint8_t mm_camera_is_event_supported(mm_camera_obj_t *my_obj, mm_camera_event_type_t evt_type)
+{
+    /* fix: removed the unreachable trailing `return 0` — every switch path
+     * (including default) already returns */
+    switch(evt_type) {
+    case MM_CAMERA_EVT_TYPE_CH:
+    case MM_CAMERA_EVT_TYPE_CTRL:
+    case MM_CAMERA_EVT_TYPE_STATS:
+    case MM_CAMERA_EVT_TYPE_INFO:
+        return 1;
+    default:
+        return 0;
+    }
+}
+
+/* Register (evt_cb != NULL) or unregister (evt_cb == NULL, matched by
+ * user_data) an app callback for evt_type. On the first registration or
+ * last unregistration (reg_count transitions through <= 1) the event is
+ * (un)subscribed with the kernel via mm_camera_evt_sub. Holds cb_lock for
+ * the whole operation. Returns 0 on success, -1 when no free/matching slot. */
+int32_t mm_camera_register_event_notify_internal(
+                   mm_camera_obj_t *my_obj,
+                   mm_camera_event_notify_t evt_cb,
+                   void * user_data,
+                   mm_camera_event_type_t evt_type)
+{
+    int i;
+    int rc = -1;
+    mm_camera_evt_obj_t *evt_array = NULL;
+
+    pthread_mutex_lock(&my_obj->cb_lock);
+    evt_array = &my_obj->evt[evt_type];
+    if(evt_cb) {
+        /* this is reg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            /* a slot is "free" when its user_data is NULL */
+            if(evt_array->evt[i].user_data == NULL) {
+                evt_array->evt[i].evt_cb = evt_cb;
+                evt_array->evt[i].user_data = user_data;
+                evt_array->reg_count++;
+                rc = 0;
+                break;
+            }
+        }
+    } else {
+        /* this is unreg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == user_data) {
+                evt_array->evt[i].evt_cb = NULL;
+                evt_array->evt[i].user_data = NULL;
+                evt_array->reg_count--;
+                rc = 0;
+                break;
+            }
+        }
+    }
+
+    if(rc == 0 && evt_array->reg_count <= 1) {
+        /* subscribe/unsubscribe event to kernel */
+        rc = mm_camera_evt_sub(my_obj, evt_type, evt_array->reg_count);
+    }
+
+    pthread_mutex_unlock(&my_obj->cb_lock);
+    return rc;
+}
+
+/* Public wrapper around mm_camera_register_event_notify_internal.
+ * Caller enters holding cam_lock; this function releases it before
+ * returning (the internal helper takes cb_lock itself).
+ * Returns the internal helper's result (0 success, -1 failure). */
+int32_t mm_camera_register_event_notify(mm_camera_obj_t *my_obj,
+                                        mm_camera_event_notify_t evt_cb,
+                                        void * user_data,
+                                        mm_camera_event_type_t evt_type)
+{
+    int rc = -1;
+
+    /* fix: removed unused locals `int i` and `mm_camera_evt_obj_t *evt_array`
+     * (the internal helper does its own slot lookup) */
+    rc = mm_camera_register_event_notify_internal(my_obj, evt_cb,
+                                                  user_data, evt_type);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/* Re-queue a buffer back to channel ch_id. cam_lock is released up front
+ * and ch_lock deliberately not taken: qbuf runs inside the data-callback
+ * context and locking there would deadlock against stream stop.
+ * Returns 0 on success, -1 when the channel is unknown or qbuf fails. */
+int32_t mm_camera_qbuf(mm_camera_obj_t *my_obj,
+                       uint32_t ch_id,
+                       mm_camera_buf_def_t *buf)
+{
+    int rc = -1;
+    mm_channel_t * ch_obj = NULL;
+    ch_obj = mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    /* we always assume qbuf will be done before channel/stream is fully stopped
+     * because qbuf is done within dataCB context
+     * in order to avoid deadlock, we are not locking ch_lock for qbuf */
+    if (NULL != ch_obj) {
+        rc = mm_channel_qbuf(ch_obj, buf);
+    }
+
+    return rc;
+}
+
+/* Stub: secondary-sensor discovery is not implemented yet; always NULL. */
+mm_camera_2nd_sensor_t * mm_camera_query_2nd_sensor_info(mm_camera_obj_t *my_obj)
+{
+    /* TODO: need to sync with backend how to get 2nd sensor info */
+    return NULL;
+}
+
+/* Fetch the backend's capability blob into my_obj->properties and set the
+ * post-processing flag. Called from mm_camera_open today; does NOT unlock
+ * cam_lock (see trailing TODO). Returns 0 on success, non-zero on failure. */
+int32_t mm_camera_sync(mm_camera_obj_t *my_obj)
+{
+    int32_t rc = 0;
+
+    /* get camera capabilities */
+    memset(&my_obj->properties, 0, sizeof(cam_prop_t));
+    rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                        CAMERA_GET_CAPABILITIES,
+                                        sizeof(cam_prop_t),
+                                        (void *)&my_obj->properties);
+    if (rc != 0) {
+        CDBG_ERROR("%s: cannot get camera capabilities\n", __func__);
+        goto on_error;
+    }
+
+    /* TODO */
+    /* after kernel/backend support query of offline pp capability
+     * we can get the value from capabilities.
+     * for now, hard-coded to 1, meaning always need post processing */
+    //my_obj->need_pp = 1;
+    my_obj->need_pp = 0;
+
+on_error:
+    /* TODO:
+     * We need to enable below lock once we consider sync as separate API
+     * and HAL needs to call after each camera open call.
+     */
+    //pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+
+}
+
+/* Query whether parm_type is supported, from the capability bitmask cached
+ * by mm_camera_sync. The same bit currently answers both set and get
+ * support (see TODO). Releases cam_lock before returning; always returns 0. */
+int32_t mm_camera_is_parm_supported(mm_camera_obj_t *my_obj,
+                                    mm_camera_parm_type_t parm_type,
+                                    uint8_t *support_set_parm,
+                                    uint8_t *support_get_parm)
+{
+    /* TODO: need to sync with backend if it can support set/get */
+    int32_t rc = 0;
+    *support_set_parm = GET_PARM_BIT32(parm_type,
+                                       my_obj->properties.parm);
+    *support_get_parm = GET_PARM_BIT32(parm_type,
+                                       my_obj->properties.parm);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return rc;
+}
+
+/* Translate an mm_camera op mode into the MSM v4l2 op mode and push it via
+ * s_ctrl. No-op when the mode is already current; my_obj->op_mode is only
+ * updated after the ioctl succeeds. Returns 0 on success, -1 on an
+ * unsupported mode, or the s_ctrl error code. */
+int32_t mm_camera_util_set_op_mode(mm_camera_obj_t * my_obj,
+                                   mm_camera_op_mode_type_t *op_mode)
+{
+    int32_t rc = 0;
+    int32_t v4l2_op_mode = MSM_V4L2_CAM_OP_DEFAULT;
+
+    if (my_obj->op_mode == *op_mode)
+        goto end;
+    switch(*op_mode) {
+    case MM_CAMERA_OP_MODE_ZSL:
+        v4l2_op_mode = MSM_V4L2_CAM_OP_ZSL;
+        break;
+    case MM_CAMERA_OP_MODE_CAPTURE:
+        v4l2_op_mode = MSM_V4L2_CAM_OP_CAPTURE;
+        break;
+    case MM_CAMERA_OP_MODE_VIDEO:
+        v4l2_op_mode = MSM_V4L2_CAM_OP_VIDEO;
+        break;
+    default:
+        rc = - 1;
+        goto end;
+        break;
+    }
+    if(0 != (rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+            MSM_V4L2_PID_CAM_MODE, v4l2_op_mode))){
+        CDBG_ERROR("%s: input op_mode=%d, s_ctrl rc=%d\n", __func__, *op_mode, rc);
+        goto end;
+    }
+    /* if success update mode field */
+    my_obj->op_mode = *op_mode;
+end:
+    CDBG("%s: op_mode=%d,rc=%d\n", __func__, *op_mode, rc);
+    return rc;
+}
+
+/* Set a camera parameter. Op mode, dimension and burst count are handled
+ * locally (dimension is also cached in my_obj->dim on success); all other
+ * parms fall through to mm_camera_set_general_parm. Releases cam_lock
+ * before returning. Returns 0 on success, non-zero on failure. */
+int32_t mm_camera_set_parm(mm_camera_obj_t *my_obj,
+                           mm_camera_parm_type_t parm_type,
+                           void* p_value)
+{
+    int32_t rc = 0;
+    CDBG("%s type =%d", __func__, parm_type);
+    switch(parm_type) {
+    case MM_CAMERA_PARM_OP_MODE:
+        rc = mm_camera_util_set_op_mode(my_obj,
+                        (mm_camera_op_mode_type_t *)p_value);
+        break;
+    case MM_CAMERA_PARM_DIMENSION:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_DIMENSION, sizeof(cam_ctrl_dimension_t), p_value);
+        if(rc != 0) {
+            CDBG("%s: mm_camera_send_native_ctrl_cmd err=%d\n", __func__, rc);
+            break;
+        }
+        /* cache the accepted dimensions locally */
+        memcpy(&my_obj->dim, (cam_ctrl_dimension_t *)p_value,
+               sizeof(cam_ctrl_dimension_t));
+        CDBG("%s: dw=%d,dh=%d,vw=%d,vh=%d,pw=%d,ph=%d,tw=%d,th=%d,raw_w=%d,raw_h=%d\n",
+             __func__,
+             my_obj->dim.display_width,my_obj->dim.display_height,
+             my_obj->dim.video_width, my_obj->dim.video_height,
+             my_obj->dim.picture_width,my_obj->dim.picture_height,
+             my_obj->dim.ui_thumbnail_width,my_obj->dim.ui_thumbnail_height,
+             my_obj->dim.raw_picture_width,my_obj->dim.raw_picture_height);
+        break;
+    case MM_CAMERA_PARM_SNAPSHOT_BURST_NUM:
+        my_obj->snap_burst_num_by_user = *((uint32_t *)p_value);
+        break;
+    default:
+        rc = mm_camera_set_general_parm(my_obj, parm_type, p_value);
+        break;
+    }
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/* Query a camera-level parameter.
+ * Cached properties (sizes, counts, formats) are answered from my_obj
+ * directly; the remaining parm types are forwarded to the driver via
+ * mm_camera_send_native_ctrl_cmd. p_value is both input and output and
+ * must point at the type implied by parm_type.
+ * Returns 0 on success, -1 on failure/unsupported parm.
+ * NOTE(review): my_obj->cam_lock appears to be acquired by the caller
+ * (the dispatch layer) and is released here before return - confirm. */
+int32_t mm_camera_get_parm(mm_camera_obj_t *my_obj,
+                           mm_camera_parm_type_t parm_type,
+                           void* p_value)
+{
+    int32_t rc = 0;
+
+    switch(parm_type) {
+    case MM_CAMERA_PARM_MAX_PICTURE_SIZE:
+        {
+            mm_camera_dimension_t *dim =
+                (mm_camera_dimension_t *)p_value;
+            dim->height = my_obj->properties.max_pict_height;
+            dim->width = my_obj->properties.max_pict_width;
+            CDBG("%s: Max Picture Size: %d X %d\n", __func__,
+                 dim->width, dim->height);
+        }
+        break;
+    case MM_CAMERA_PARM_PREVIEW_FORMAT:
+        *((int *)p_value) = my_obj->properties.preview_format;
+        break;
+    case MM_CAMERA_PARM_PREVIEW_SIZES_CNT:
+        *((int *)p_value) = my_obj->properties.preview_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_VIDEO_SIZES_CNT:
+        *((int *)p_value) = my_obj->properties.video_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_THUMB_SIZES_CNT:
+        *((int *)p_value) = my_obj->properties.thumb_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_HFR_SIZES_CNT:
+        *((int *)p_value) = my_obj->properties.hfr_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_HFR_FRAME_SKIP:
+        *((int *)p_value) = my_obj->properties.hfr_frame_skip;
+        break;
+    case MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH:
+        *((int *)p_value) = my_obj->properties.default_preview_width;
+        break;
+    case MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT:
+        *((int *)p_value) = my_obj->properties.default_preview_height;
+        break;
+    case MM_CAMERA_PARM_MAX_PREVIEW_SIZE:
+        {
+            mm_camera_dimension_t *dim =
+                (mm_camera_dimension_t *)p_value;
+            dim->height = my_obj->properties.max_preview_height;
+            dim->width = my_obj->properties.max_preview_width;
+            CDBG("%s: Max Preview Size: %d X %d\n", __func__,
+                 dim->width, dim->height);
+        }
+        break;
+    case MM_CAMERA_PARM_MAX_VIDEO_SIZE:
+        {
+            mm_camera_dimension_t *dim =
+                (mm_camera_dimension_t *)p_value;
+            dim->height = my_obj->properties.max_video_height;
+            dim->width = my_obj->properties.max_video_width;
+            CDBG("%s: Max Video Size: %d X %d\n", __func__,
+                 dim->width, dim->height);
+        }
+        break;
+    /* the cases below go to the driver; the size argument tells the
+     * driver how many bytes p_value can hold / provides */
+    case MM_CAMERA_PARM_MAX_HFR_MODE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_MAX_HFR_MODE,
+                                            sizeof(camera_hfr_mode_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_FOCAL_LENGTH:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_FOCAL_LENGTH,
+                                            sizeof(float),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+                                            sizeof(float),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+                                            sizeof(float),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_FOCUS_DISTANCES:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_FOCUS_DISTANCES,
+                                            sizeof(focus_distances_info_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_QUERY_FALSH4SNAP:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_QUERY_FLASH_FOR_SNAPSHOT,
+                                            sizeof(int),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_3D_FRAME_FORMAT:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_3D_FRAME_FORMAT,
+                                            sizeof(camera_3d_frame_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_MAXZOOM:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_MAXZOOM,
+                                            sizeof(int),
+                                            p_value);
+        break;
+    /* table queries: the caller pre-sizes the table and the driver
+     * fills in up to that many entries */
+    case MM_CAMERA_PARM_ZOOM_RATIO:
+        {
+            mm_camera_zoom_tbl_t *tbl = (mm_camera_zoom_tbl_t *)p_value;
+            rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                                CAMERA_GET_PARM_ZOOMRATIOS,
+                                                sizeof(int16_t)*tbl->size,
+                                                (void *)(tbl->zoom_ratio_tbl));
+        }
+        break;
+    case MM_CAMERA_PARM_DEF_PREVIEW_SIZES:
+        {
+            default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)p_value;
+            rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                                CAMERA_GET_PARM_DEF_PREVIEW_SIZES,
+                                                sizeof(struct camera_size_type)*tbl->tbl_size,
+                                                (void* )(tbl->sizes_tbl));
+        }
+        break;
+    case MM_CAMERA_PARM_DEF_VIDEO_SIZES:
+        {
+            default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)p_value;
+            rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                                CAMERA_GET_PARM_DEF_VIDEO_SIZES,
+                                                sizeof(struct camera_size_type)*tbl->tbl_size,
+                                                (void *)(tbl->sizes_tbl));
+        }
+        break;
+    case MM_CAMERA_PARM_DEF_THUMB_SIZES:
+        {
+            default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)p_value;
+            rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                                CAMERA_GET_PARM_DEF_THUMB_SIZES,
+                                                sizeof(struct camera_size_type)*tbl->tbl_size,
+                                                (void *)(tbl->sizes_tbl));
+        }
+        break;
+    case MM_CAMERA_PARM_DEF_HFR_SIZES:
+        {
+            default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)p_value;
+            rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                                CAMERA_GET_PARM_DEF_HFR_SIZES,
+                                                sizeof(struct camera_size_type)*tbl->tbl_size,
+                                                (void *)(tbl->sizes_tbl));
+        }
+        break;
+    case MM_CAMERA_PARM_SNAPSHOT_BURST_NUM:
+        *((int *)p_value) = my_obj->snap_burst_num_by_user;
+        break;
+    case MM_CAMERA_PARM_VFE_OUTPUT_ENABLE:
+        *((int *)p_value) = my_obj->properties.vfe_output_enable;
+        break;
+    case MM_CAMERA_PARM_DIMENSION:
+        /* copy the whole cached dimension block in one shot */
+        memcpy(p_value, &my_obj->dim, sizeof(my_obj->dim));
+        CDBG("%s: dw=%d,dh=%d,vw=%d,vh=%d,pw=%d,ph=%d,tw=%d,th=%d,ovx=%x,ovy=%d,opx=%d,opy=%d, m_fmt=%d, t_ftm=%d\n",
+             __func__,
+             my_obj->dim.display_width,my_obj->dim.display_height,
+             my_obj->dim.video_width,my_obj->dim.video_height,
+             my_obj->dim.picture_width,my_obj->dim.picture_height,
+             my_obj->dim.ui_thumbnail_width,my_obj->dim.ui_thumbnail_height,
+             my_obj->dim.orig_video_width,my_obj->dim.orig_video_height,
+             my_obj->dim.orig_picture_width,my_obj->dim.orig_picture_height,
+             my_obj->dim.main_img_format, my_obj->dim.thumb_format);
+        break;
+    case MM_CAMERA_PARM_OP_MODE:
+        *((mm_camera_op_mode_type_t *)p_value) = my_obj->op_mode;
+        break;
+    default:
+        /* needs to add more implementation */
+        rc = -1;
+        break;
+    }
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+    return rc;
+}
+
+/* Allocate and initialize a free channel slot on the camera object.
+ * Returns the new channel handle, or 0 when every slot is in use.
+ * my_obj->cam_lock is held on entry and released before return. */
+uint32_t mm_camera_add_channel(mm_camera_obj_t *my_obj)
+{
+    mm_channel_t *ch_obj = NULL;
+    uint8_t ch_idx = 0;
+    uint32_t ch_hdl = 0;
+
+    /* find the first unused channel slot */
+    for(ch_idx = 0; ch_idx < MM_CAMERA_CHANNEL_MAX; ch_idx++) {
+        if (MM_CHANNEL_STATE_NOTUSED == my_obj->ch[ch_idx].state) {
+            ch_obj = &my_obj->ch[ch_idx];
+            break;
+        }
+    }
+
+    if (NULL != ch_obj) {
+        /* initialize channel obj */
+        memset(ch_obj, 0, sizeof(mm_channel_t));
+        ch_hdl = mm_camera_util_generate_handler(ch_idx);
+        ch_obj->my_hdl = ch_hdl;
+        ch_obj->state = MM_CHANNEL_STATE_STOPPED;
+        ch_obj->cam_obj = my_obj;
+        pthread_mutex_init(&ch_obj->ch_lock, NULL);
+        /* BUGFIX: init only when a slot was found. The original called
+         * mm_channel_init(ch_obj) unconditionally, passing NULL (and
+         * dereferencing it) when all channel slots were occupied. */
+        mm_channel_init(ch_obj);
+    }
+
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return ch_hdl;
+}
+
+/* Delete a channel: send the DELETE event through the channel FSM, then
+ * destroy its lock and clear the slot for reuse.
+ * my_obj->cam_lock is held on entry and released here. */
+void mm_camera_del_channel(mm_camera_obj_t *my_obj,
+                           uint32_t ch_id)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        /* unknown handle: nothing to delete, just drop the camera lock */
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return;
+    }
+
+    /* lock hand-off: take the channel lock before releasing the camera lock */
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    mm_channel_fsm_fn(channel,
+                      MM_CHANNEL_EVT_DELETE,
+                      NULL,
+                      NULL);
+
+    pthread_mutex_destroy(&channel->ch_lock);
+    memset(channel, 0, sizeof(mm_channel_t));
+}
+
+/* Add a stream to a channel via the channel FSM.
+ * buf_cb/user_data: per-stream buffer notification callback and cookie.
+ * ext_image_mode/sensor_idx: driver-level stream identity.
+ * Returns the new stream handle, or 0 on failure (bad channel id or FSM
+ * rejection). my_obj->cam_lock is held on entry and released here. */
+uint32_t mm_camera_add_stream(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id,
+                              mm_camera_buf_notify_t buf_cb, void *user_data,
+                              uint32_t ext_image_mode, uint32_t sensor_idx)
+{
+    uint32_t s_hdl = 0;
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+    mm_evt_paylod_add_stream_t payload;
+
+    if (NULL != ch_obj) {
+        /* take the channel lock before dropping the camera lock */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(mm_evt_paylod_add_stream_t));
+        payload.buf_cb = buf_cb;
+        payload.user_data = user_data;
+        payload.ext_image_mode = ext_image_mode;
+        payload.sensor_idx = sensor_idx;
+        /* the FSM writes the new stream handle into s_hdl */
+        mm_channel_fsm_fn(ch_obj,
+                          MM_CHANNEL_EVT_ADD_STREAM,
+                          (void*)&payload,
+                          (void*)&s_hdl);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+
+    return s_hdl;
+}
+
+/* Remove a stream from a channel through the channel FSM.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_del_stream(mm_camera_obj_t *my_obj,
+                             uint32_t ch_id,
+                             uint32_t stream_id)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    /* lock hand-off: channel lock first, then drop the camera lock */
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_DEL_STREAM,
+                             (void*)&stream_id,
+                             NULL);
+}
+
+/* Configure an existing stream (format, dimensions, etc.) via the FSM.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_config_stream(mm_camera_obj_t *my_obj,
+                                uint32_t ch_id,
+                                uint32_t stream_id,
+                                mm_camera_stream_config_t *config)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    mm_evt_paylod_config_stream_t payload;
+    memset(&payload, 0, sizeof(mm_evt_paylod_config_stream_t));
+    payload.stream_id = stream_id;
+    payload.config = config;
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_CONFIG_STREAM,
+                             (void*)&payload,
+                             NULL);
+}
+
+/* Bundle a set of streams so their frames are delivered together as a
+ * super-buffer through super_frame_notify_cb.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_bundle_streams(mm_camera_obj_t *my_obj,
+                                 uint32_t ch_id,
+                                 mm_camera_buf_notify_t super_frame_notify_cb,
+                                 void *user_data,
+                                 mm_camera_bundle_attr_t *attr,
+                                 uint8_t num_streams,
+                                 uint32_t *stream_ids)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    mm_evt_payload_bundle_stream_t payload;
+    memset(&payload, 0, sizeof(mm_evt_payload_bundle_stream_t));
+    payload.super_frame_notify_cb = super_frame_notify_cb;
+    payload.user_data = user_data;
+    payload.attr = attr;
+    payload.num_streams = num_streams;
+    payload.stream_ids = stream_ids;
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_INIT_BUNDLE,
+                             (void*)&payload,
+                             NULL);
+}
+
+/* Tear down a previously created stream bundle on a channel.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_destroy_bundle(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_DESTROY_BUNDLE,
+                             NULL,
+                             NULL);
+}
+
+/* Start a group of streams on a channel through the channel FSM.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_start_streams(mm_camera_obj_t *my_obj,
+                                uint32_t ch_id,
+                                uint8_t num_streams,
+                                uint32_t *stream_ids)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    mm_evt_payload_start_stream_t payload;
+    memset(&payload, 0, sizeof(mm_evt_payload_start_stream_t));
+    payload.num_streams = num_streams;
+    payload.stream_ids = stream_ids;
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_START_STREAM,
+                             (void*)&payload,
+                             NULL);
+}
+
+/* Stop a group of streams on a channel (synchronous).
+ * Returns the FSM result; returns 0 if ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_stop_streams(mm_camera_obj_t *my_obj,
+                               uint32_t ch_id,
+                               uint8_t num_streams,
+                               uint32_t *stream_ids)
+{
+    int32_t rc = 0;
+    mm_evt_payload_stop_stream_t payload;
+    /* removed: unused local `mm_camera_cmdcb_t *node` (leftover from the
+     * async variant of this function) */
+
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        /* take the channel lock before dropping the camera lock */
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        memset(&payload, 0, sizeof(mm_evt_payload_stop_stream_t));
+        payload.num_streams = num_streams;
+        payload.stream_ids = stream_ids;
+
+        rc = mm_channel_fsm_fn(ch_obj,
+                               MM_CHANNEL_EVT_STOP_STREAM,
+                               (void*)&payload,
+                               NULL);
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
+
+/* Queue an asynchronous stop of a group of streams: a STOP command node is
+ * handed to the async_cmd_thread, which performs the actual teardown.
+ * Returns 0 on success (command queued, or unknown ch_id), -1 on OOM.
+ * my_obj->cam_lock is held on entry and released here.
+ * NOTE(review): on the success path ch_obj->ch_lock is left held when this
+ * function returns - presumably released by the async stop handler; confirm.
+ * NOTE(review): the memcpy into node->u.async.u.stop_cmd.stream_ids assumes
+ * num_streams fits that fixed-size array - confirm the bound at call sites. */
+int32_t mm_camera_async_teardown_streams(mm_camera_obj_t *my_obj,
+                                         uint32_t ch_id,
+                                         uint8_t num_streams,
+                                         uint32_t *stream_ids)
+{
+    int32_t rc = 0;
+    mm_camera_cmdcb_t * node = NULL;
+    /* removed: unused local `mm_evt_payload_stop_stream_t payload` (the
+     * stream list travels inside the command node instead) */
+
+    mm_channel_t * ch_obj =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL != ch_obj) {
+        pthread_mutex_lock(&ch_obj->ch_lock);
+        pthread_mutex_unlock(&my_obj->cam_lock);
+
+        /* enqueu asyn stop cmd to async_cmd_thread */
+        node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+        if (NULL != node) {
+            memset(node, 0, sizeof(mm_camera_cmdcb_t));
+            node->cmd_type = MM_CAMERA_CMD_TYPE_ASYNC_CB;
+            node->u.async.cmd_type = MM_CAMERA_ASYNC_CMD_TYPE_STOP;
+            node->u.async.u.stop_cmd.ch_obj = ch_obj;
+            node->u.async.u.stop_cmd.num_streams = num_streams;
+            memcpy(node->u.async.u.stop_cmd.stream_ids, stream_ids, sizeof(uint32_t)*num_streams);
+
+            /* enqueue to async cmd thread */
+            mm_camera_queue_enq(&(my_obj->async_cmd_thread.cmd_queue), node);
+            /* wake up async cmd thread */
+            sem_post(&(my_obj->async_cmd_thread.cmd_sem));
+        } else {
+            CDBG_ERROR("%s: No memory for mm_camera_cmdcb_t", __func__);
+            pthread_mutex_unlock(&ch_obj->ch_lock);
+            rc = -1;
+            return rc;
+        }
+    } else {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+    }
+    return rc;
+}
+
+/* Request one super-buffer (a matched set of bundled frames) from a channel.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_request_super_buf(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_REQUEST_SUPER_BUF,
+                             NULL,
+                             NULL);
+}
+
+/* Cancel an outstanding super-buffer request on a channel.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_cancel_super_buf_request(mm_camera_obj_t *my_obj, uint32_t ch_id)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF,
+                             NULL,
+                             NULL);
+}
+
+/* Kick off an autofocus cycle on a channel for the given sensor/mode.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_start_focus(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id,
+                              uint32_t sensor_idx,
+                              uint32_t focus_mode)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    mm_evt_payload_start_focus_t payload;
+    memset(&payload, 0, sizeof(mm_evt_payload_start_focus_t));
+    payload.sensor_idx = sensor_idx;
+    payload.focus_mode = focus_mode;
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_START_FOCUS,
+                             (void *)&payload,
+                             NULL);
+}
+
+/* Abort an in-progress autofocus cycle on a channel.
+ * sensor_idx is smuggled through the FSM's void* payload slot as an
+ * integer value (cast, not a pointer) - preserved as-is.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_abort_focus(mm_camera_obj_t *my_obj,
+                              uint32_t ch_id,
+                              uint32_t sensor_idx)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_ABORT_FOCUS,
+                             (void*)sensor_idx,
+                             NULL);
+}
+
+/* Ask the channel to prepare for a snapshot (e.g. flash/AE convergence).
+ * sensor_idx travels through the FSM's void* payload slot as an integer
+ * value (cast, not a pointer) - preserved as-is.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_prepare_snapshot(mm_camera_obj_t *my_obj,
+                                   uint32_t ch_id,
+                                   uint32_t sensor_idx)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_PREPARE_SNAPSHOT,
+                             (void *)sensor_idx,
+                             NULL);
+}
+
+/* Set a per-stream parameter. The stream id is passed in the FSM's event
+ * data slot (as an integer cast to void*) and the parm payload in the
+ * out-value slot - preserved exactly as the FSM expects.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_set_stream_parm(mm_camera_obj_t *my_obj,
+                                  uint32_t ch_id,
+                                  uint32_t s_id,
+                                  mm_camera_stream_parm_t parm_type,
+                                  void* p_value)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    mm_evt_paylod_stream_parm_t payload;
+    memset(&payload,0,sizeof(mm_evt_paylod_stream_parm_t));
+    payload.parm_type = parm_type;
+    payload.value = p_value;
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_SET_STREAM_PARM,
+                             (void *)s_id,
+                             &payload);
+}
+
+/* Get a per-stream parameter. Mirrors mm_camera_set_stream_parm: the
+ * stream id rides in the FSM event-data slot and the parm payload (whose
+ * value pointer receives the result) in the out-value slot.
+ * Returns the FSM result, or -1 when ch_id is not a valid channel.
+ * my_obj->cam_lock is held on entry and released here. */
+int32_t mm_camera_get_stream_parm(mm_camera_obj_t *my_obj,
+                                  uint32_t ch_id,
+                                  uint32_t s_id,
+                                  mm_camera_stream_parm_t parm_type,
+                                  void* p_value)
+{
+    mm_channel_t *channel =
+        mm_camera_util_get_channel_by_handler(my_obj, ch_id);
+
+    if (NULL == channel) {
+        pthread_mutex_unlock(&my_obj->cam_lock);
+        return -1;
+    }
+
+    pthread_mutex_lock(&channel->ch_lock);
+    pthread_mutex_unlock(&my_obj->cam_lock);
+
+    mm_evt_paylod_stream_parm_t payload;
+    memset(&payload,0,sizeof(mm_evt_paylod_stream_parm_t));
+    payload.parm_type = parm_type;
+    payload.value = p_value;
+
+    return mm_channel_fsm_fn(channel,
+                             MM_CHANNEL_EVT_GET_STREAM_PARM,
+                             (void *)s_id,
+                             &payload);
+}
+
+/* Apply a special-effect setting through the MSM private V4L2 effect
+ * control; CAMERA_EFFECT_MAX is mapped to CAMERA_EFFECT_OFF first.
+ * Returns the raw ioctl result. */
+int32_t mm_camera_ctrl_set_specialEffect (mm_camera_obj_t *my_obj, int32_t effect) {
+    struct v4l2_control ctrl = {
+        .id    = MSM_V4L2_PID_EFFECT,
+        .value = (effect == CAMERA_EFFECT_MAX) ? CAMERA_EFFECT_OFF : effect,
+    };
+    return ioctl(my_obj->ctrl_fd, VIDIOC_S_CTRL, &ctrl);
+}
+
+/* Enable/disable continuous autofocus via V4L2_CID_FOCUS_AUTO.
+ * value must be 0 or 1. Returns -1 on invalid value or control failure;
+ * returns 0 when the control succeeds or is simply not supported. */
+int32_t mm_camera_ctrl_set_auto_focus (mm_camera_obj_t *my_obj, int32_t value)
+{
+    int32_t rc;
+    struct v4l2_queryctrl queryctrl;
+
+    if (value != 0 && value != 1) {
+        CDBG("%s:boolean required, invalid value = %d\n",__func__, value);
+        return -1;
+    }
+
+    memset(&queryctrl, 0, sizeof(queryctrl));
+    queryctrl.id = V4L2_CID_FOCUS_AUTO;
+
+    /* probe whether the driver exposes the control at all */
+    if (-1 == ioctl(my_obj->ctrl_fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+        CDBG("V4L2_CID_FOCUS_AUTO is not supported\n");
+        return 0;
+    }
+    if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+        CDBG("%s:V4L2_CID_FOCUS_AUTO is not supported\n", __func__);
+        return 0;
+    }
+
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_FOCUS_AUTO, value);
+    if (0 != rc) {
+        CDBG("%s: error, id=0x%x, value=%d, rc = %d\n",
+             __func__, V4L2_CID_FOCUS_AUTO, value, rc);
+        rc = -1;
+    }
+    return rc;
+}
+
+/* Map an mm_camera white-balance mode onto the V4L2 controls:
+ * AUTO/OFF toggle V4L2_CID_AUTO_WHITE_BALANCE; all other modes set a
+ * fixed colour temperature via V4L2_CID_WHITE_BALANCE_TEMPERATURE.
+ * Unknown modes fall back to 4200 (same as FLUORESCENT). */
+int32_t mm_camera_ctrl_set_whitebalance (mm_camera_obj_t *my_obj, int32_t mode) {
+
+    uint32_t ctrl_id;
+    int32_t ctrl_value;
+    int32_t rc;
+
+    if (mode == MM_CAMERA_WHITE_BALANCE_AUTO ||
+        mode == MM_CAMERA_WHITE_BALANCE_OFF) {
+        /* boolean auto-WB control: 1 = enable, 0 = disable */
+        ctrl_id = V4L2_CID_AUTO_WHITE_BALANCE;
+        ctrl_value = (mode == MM_CAMERA_WHITE_BALANCE_AUTO) ? 1 : 0;
+    } else {
+        ctrl_id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+        switch(mode) {
+        case MM_CAMERA_WHITE_BALANCE_DAYLIGHT:
+            ctrl_value = 6500;
+            break;
+        case MM_CAMERA_WHITE_BALANCE_INCANDESCENT:
+            ctrl_value = 2800;
+            break;
+        case MM_CAMERA_WHITE_BALANCE_CLOUDY:
+            ctrl_value = 7500;
+            break;
+        case MM_CAMERA_WHITE_BALANCE_FLUORESCENT:
+        default:
+            ctrl_value = 4200;
+            break;
+        }
+    }
+
+    rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd, ctrl_id, ctrl_value);
+    if(0 != rc){
+        CDBG("%s: error, exp_metering_action_param=%d, rc = %d\n", __func__, ctrl_value, rc);
+    }
+    return rc;
+}
+
+/* Set a general (non-dimension) camera parameter.
+ * Simple scalar controls go straight to the V4L2 control interface
+ * (mm_camera_util_s_ctrl); everything else is packaged as a native
+ * control command for the driver. p_value must point at the type
+ * implied by parm_type. Returns 0 on success, -1 on failure or
+ * unsupported parm. Called from mm_camera_set_parm's default branch. */
+int32_t mm_camera_set_general_parm(mm_camera_obj_t * my_obj,
+                                   mm_camera_parm_type_t parm_type,
+                                   void* p_value)
+{
+    int rc = -1;
+    /* removed: unused local `int isZSL` */
+
+    switch(parm_type) {
+    case MM_CAMERA_PARM_EXPOSURE:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   MSM_V4L2_PID_EXP_METERING,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_SHARPNESS:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   V4L2_CID_SHARPNESS,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_CONTRAST:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   V4L2_CID_CONTRAST,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_SATURATION:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   V4L2_CID_SATURATION,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_BRIGHTNESS:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   V4L2_CID_BRIGHTNESS,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_WHITE_BALANCE:
+        rc = mm_camera_ctrl_set_whitebalance (my_obj, *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_ISO:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   MSM_V4L2_PID_ISO,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_ZOOM:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   V4L2_CID_ZOOM_ABSOLUTE,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_LUMA_ADAPTATION:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   MSM_V4L2_PID_LUMA_ADAPTATION,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_ANTIBANDING:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   V4L2_CID_POWER_LINE_FREQUENCY,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_CONTINUOUS_AF:
+        rc = mm_camera_ctrl_set_auto_focus(my_obj,
+                                           *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_HJR:
+        rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                                   MSM_V4L2_PID_HJR,
+                                   *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_EFFECT:
+        rc = mm_camera_ctrl_set_specialEffect (my_obj,
+                                               *((int32_t *)p_value));
+        break;
+    case MM_CAMERA_PARM_FPS:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_FPS,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_FPS_MODE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_FPS_MODE,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_EXPOSURE_COMPENSATION:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_EXPOSURE_COMPENSATION,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_LED_MODE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_LED_MODE,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_ROLLOFF:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_ROLLOFF,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_MODE:
+        /* cached locally only; no driver round-trip (rc stays -1 by
+         * design? NOTE(review): this leaves rc = -1 - confirm callers
+         * ignore the return for PARM_MODE) */
+        my_obj->current_mode = *((camera_mode_t *)p_value);
+        break;
+    case MM_CAMERA_PARM_FOCUS_RECT:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_FOCUS_RECT,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_AEC_ROI:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_AEC_ROI,
+                                            sizeof(cam_set_aec_roi_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_AF_ROI:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_AF_ROI,
+                                            sizeof(roi_info_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_FOCUS_MODE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_AF_MODE,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+#if 0 /* to be enabled later: @punits */
+    case MM_CAMERA_PARM_AF_MTR_AREA:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_AF_MTR_AREA,
+                                            sizeof(af_mtr_area_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_AEC_MTR_AREA:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_AEC_MTR_AREA,
+                                            sizeof(aec_mtr_area_t),
+                                            p_value);
+        break;
+#endif
+    case MM_CAMERA_PARM_CAF_ENABLE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_CAF,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_BESTSHOT_MODE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_BESTSHOT_MODE,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_VIDEO_DIS:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_VIDEO_DIS_PARAMS,
+                                            sizeof(video_dis_param_ctrl_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_VIDEO_ROT:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_VIDEO_ROT_PARAMS,
+                                            sizeof(video_rotation_param_ctrl_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_SCE_FACTOR:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_SCE_FACTOR,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_FD:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_FD,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_AEC_LOCK:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_AEC_LOCK,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_AWB_LOCK:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_AWB_LOCK,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_MCE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_MCE,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    /* NOTE(review): the two VIEW_ANGLE cases below send GET commands from
+     * the set path and use sizeof(focus_distances_info_t), whereas
+     * mm_camera_get_parm uses sizeof(float) for the same commands -
+     * looks copy-pasted; confirm against the driver before changing. */
+    case MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+                                            sizeof(focus_distances_info_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+                                            sizeof(focus_distances_info_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_RESET_LENS_TO_INFINITY:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_RESET_LENS_TO_INFINITY,
+                                            0, NULL);
+        break;
+    case MM_CAMERA_PARM_SNAPSHOTDATA:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_GET_PARM_SNAPSHOTDATA,
+                                            sizeof(snapshotData_info_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_HFR:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_HFR,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_REDEYE_REDUCTION:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_REDEYE_REDUCTION,
+                                            sizeof(int32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_WAVELET_DENOISE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_WAVELET_DENOISE,
+                                            sizeof(denoise_param_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_3D_DISPLAY_DISTANCE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_3D_DISPLAY_DISTANCE,
+                                            sizeof(float),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_3D_VIEW_ANGLE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_3D_VIEW_ANGLE,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_ZOOM_RATIO:
+        /* read-only table; setting is a no-op (rc stays -1) */
+        break;
+    case MM_CAMERA_PARM_HISTOGRAM:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_HISTOGRAM,
+                                            sizeof(int8_t),
+                                            p_value);
+        break;
+    /* Moved to mm-jpeg-interface */
+    /* case MM_CAMERA_PARM_JPEG_ROTATION:
+        break; */
+    case MM_CAMERA_PARM_ASD_ENABLE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_ASD_ENABLE,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_RECORDING_HINT:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_RECORDING_HINT,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_PREVIEW_FORMAT:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_PREVIEW_FORMAT,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    /* TODO: need code review to determine any of the three is redundent
+     * MM_CAMERA_PARM_DIS_ENABLE,
+     * MM_CAMERA_PARM_FULL_LIVESHOT,
+     * MM_CAMERA_PARM_LOW_POWER_MODE*/
+    case MM_CAMERA_PARM_DIS_ENABLE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_DIS_ENABLE,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_FULL_LIVESHOT:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_FULL_LIVESHOT,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_LOW_POWER_MODE:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_LOW_POWER_MODE,
+                                            sizeof(uint32_t),
+                                            p_value);
+        break;
+    case MM_CAMERA_PARM_HDR:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                            CAMERA_SET_PARM_HDR,
+                                            sizeof(exp_bracketing_t),
+                                            p_value);
+        break;
+    default:
+        CDBG("%s: default: parm %d not supported\n", __func__, parm_type);
+        break;
+    }
+    return rc;
+}
+
+/* Issue an MSM private S_CTRL ioctl, passing `value` through the
+ * ioctl_ptr field. Returns 0 on success; any ioctl failure is logged
+ * and normalized to 1 (callers only test for non-zero). */
+int32_t mm_camera_util_private_s_ctrl(int32_t fd, uint32_t id, void* value)
+{
+    struct msm_camera_v4l2_ioctl_t v4l2_ioctl;
+    int rc;
+
+    memset(&v4l2_ioctl, 0, sizeof(v4l2_ioctl));
+    v4l2_ioctl.id = id;
+    v4l2_ioctl.ioctl_ptr = value;
+
+    rc = ioctl(fd, MSM_CAM_V4L2_IOCTL_PRIVATE_S_CTRL, &v4l2_ioctl);
+    if (rc) {
+        CDBG_ERROR("%s: fd=%d, S_CTRL, id=0x%x, value = 0x%x, rc = %d\n",
+                   __func__, fd, id, (uint32_t)value, rc);
+        rc = 1;
+    }
+    return rc;
+}
+
+/* Convenience wrapper: send a native control command with the default
+ * 1000 ms timeout. See mm_camera_send_native_ctrl_timeout_cmd. */
+int32_t mm_camera_send_native_ctrl_cmd(mm_camera_obj_t * my_obj,
+                                       cam_ctrl_type type,
+                                       uint32_t length,
+                                       void *value)
+{
+    const int default_timeout_ms = 1000;
+    return mm_camera_send_native_ctrl_timeout_cmd(my_obj, type, length,
+                                                  value, default_timeout_ms);
+}
+
+/* Package a native control command into an msm_ctrl_cmd and push it to
+ * the driver through the private S_CTRL path.
+ * Returns 0 when the ioctl succeeds AND the driver reports one of the
+ * accepted statuses (ACCEPTED / SUCCESS / INVALID_PARM); -1 otherwise. */
+int32_t mm_camera_send_native_ctrl_timeout_cmd(mm_camera_obj_t * my_obj,
+                                               cam_ctrl_type type,
+                                               uint32_t length,
+                                               void *value,
+                                               int timeout)
+{
+    struct msm_ctrl_cmd ctrl_cmd;
+    int rc;
+
+    memset(&ctrl_cmd, 0, sizeof(ctrl_cmd));
+    ctrl_cmd.type = type;
+    ctrl_cmd.length = (uint16_t)length;
+    ctrl_cmd.timeout_ms = timeout;
+    ctrl_cmd.value = value;
+    ctrl_cmd.status = (uint16_t)CAM_CTRL_SUCCESS;
+
+    rc = mm_camera_util_private_s_ctrl(my_obj->ctrl_fd,
+                                       MSM_V4L2_PID_CTRL_CMD,
+                                       (void*)&ctrl_cmd);
+    CDBG("%s: type=%d, rc = %d, status = %d\n",
+         __func__, type, rc, ctrl_cmd.status);
+
+    if (rc != 0) {
+        rc = -1;
+    } else if (ctrl_cmd.status != CAM_CTRL_ACCEPTED &&
+               ctrl_cmd.status != CAM_CTRL_SUCCESS &&
+               ctrl_cmd.status != CAM_CTRL_INVALID_PARM) {
+        rc = -1;
+    }
+    return rc;
+}
+
+/* Reference-counted (un)subscription of driver events.
+ * reg_count == 0 unregisters evt_type; any other value registers it.
+ * evt_type_mask tracks which event types are registered; the actual V4L2
+ * (un)subscribe ioctls and the poll-fd add/remove only happen on the
+ * first registration / last unregistration. Returns the last ioctl rc. */
+int mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                      mm_camera_event_type_t evt_type,
+                      int reg_count)
+{
+    int rc = 0;
+    struct v4l2_event_subscription sub;
+
+    memset(&sub, 0, sizeof(sub));
+    sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_EVENT;
+    if(reg_count == 0) {
+        /* unsubscribe */
+        /* only hit the driver when evt_type is the sole remaining bit */
+        if(my_obj->evt_type_mask == (uint32_t)(1 << evt_type)) {
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+            CDBG("%s: unsubscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            /* the error-notify event type is paired with the notify event */
+            sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT;
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+            CDBG("%s: unsubscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+        }
+        my_obj->evt_type_mask &= ~(1 << evt_type);
+        if(my_obj->evt_type_mask == 0) {
+            /* remove evt fd from the polling thread when unreg the last event */
+            mm_camera_poll_thread_del_poll_fd(&my_obj->evt_poll_thread, my_obj->my_hdl);
+        }
+    } else {
+        if(!my_obj->evt_type_mask) {
+            /* this is the first reg event */
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+            CDBG("%s: subscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            if (rc < 0)
+                goto end;
+            sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT;
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+            CDBG("%s: subscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            if (rc < 0)
+                goto end;
+        }
+        my_obj->evt_type_mask |= (1 << evt_type);
+        /* mask equal to exactly this bit means this was the first event */
+        if(my_obj->evt_type_mask == (uint32_t)(1 << evt_type)) {
+            /* add evt fd to polling thread when subscribe the first event */
+            rc = mm_camera_poll_thread_add_poll_fd(&my_obj->evt_poll_thread,
+                                                   my_obj->my_hdl,
+                                                   my_obj->ctrl_fd,
+                                                   mm_camera_event_notify,
+                                                   (void*)my_obj);
+        }
+    }
+end:
+    return rc;
+}
+
+/* Send a message over the camera object's domain socket (ds_fd);
+ * sendfd is forwarded to the socket layer alongside the payload. */
+int32_t mm_camera_util_sendmsg(mm_camera_obj_t *my_obj, void *msg, uint32_t buf_size, int sendfd)
+{
+    int sock_fd = my_obj->ds_fd;
+    return mm_camera_socket_sendmsg(sock_fd, msg, buf_size, sendfd);
+}
+
+/* Ask the daemon to map a frame buffer: build an FD_MAPPING packet and
+ * send it over the domain socket, forwarding `fd` so the receiver can
+ * map the same buffer. */
+int32_t mm_camera_map_buf(mm_camera_obj_t *my_obj,
+                          int ext_mode,
+                          int idx,
+                          int fd,
+                          uint32_t size)
+{
+    /* zero-initialize, then fill only the map-request fields */
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+
+    packet.msg_type = CAM_SOCK_MSG_TYPE_FD_MAPPING;
+    packet.payload.frame_fd_map.ext_mode  = ext_mode;
+    packet.payload.frame_fd_map.frame_idx = idx;
+    packet.payload.frame_fd_map.fd        = fd;
+    packet.payload.frame_fd_map.size      = size;
+
+    /* the buffer fd travels both inside the payload and as the sendmsg fd */
+    return mm_camera_util_sendmsg(my_obj, &packet,
+                                  sizeof(cam_sock_packet_t),
+                                  packet.payload.frame_fd_map.fd);
+}
+
+/* Ask the daemon to unmap a previously mapped frame buffer. */
+int32_t mm_camera_unmap_buf(mm_camera_obj_t *my_obj,
+                            int ext_mode,
+                            int idx)
+{
+    cam_sock_packet_t packet;
+    memset(&packet, 0, sizeof(cam_sock_packet_t));
+    packet.msg_type = CAM_SOCK_MSG_TYPE_FD_UNMAPPING;
+    packet.payload.frame_fd_unmap.ext_mode = ext_mode;
+    packet.payload.frame_fd_unmap.frame_idx = idx;
+    /* BUGFIX: the original read packet.payload.frame_fd_map.fd - the wrong
+     * union member for an unmap packet - which evaluated to 0 only because
+     * of the memset above. No fd accompanies an unmap; pass 0 explicitly. */
+    return mm_camera_util_sendmsg(my_obj, &packet,
+                                  sizeof(cam_sock_packet_t),
+                                  0);
+}
+
+/* Thin wrapper around VIDIOC_S_CTRL: set control `id` to `value` on fd.
+ * Failures are logged; the raw ioctl rc is returned either way. */
+int32_t mm_camera_util_s_ctrl(int32_t fd, uint32_t id, int32_t value)
+{
+    struct v4l2_control control;
+    int rc;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    control.value = value;
+
+    rc = ioctl(fd, VIDIOC_S_CTRL, &control);
+    if (rc) {
+        CDBG("%s: fd=%d, S_CTRL, id=0x%x, value = 0x%x, rc = %d\n",
+             __func__, fd, id, (uint32_t)value, rc);
+    }
+    return rc;
+}
+
+/* Thin wrapper around VIDIOC_G_CTRL: read control `id` from fd into *value.
+ * Returns the raw ioctl rc; failures are logged. */
+int32_t mm_camera_util_g_ctrl( int32_t fd, uint32_t id, int32_t *value)
+{
+    int rc = 0;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    /* NOTE(review): this casts the POINTER itself to int32 and seeds
+     * control.value with it (truncating on 64-bit). Possibly an MSM
+     * private-control convention where the address is passed in-band;
+     * possibly a bug where *value was intended - confirm before changing. */
+    control.value = (int32_t)value;
+    rc = ioctl (fd, VIDIOC_G_CTRL, &control);
+    if(rc) {
+        CDBG("%s: fd=%d, G_CTRL, id=0x%x, rc = %d\n", __func__, fd, id, rc);
+    }
+    /* NOTE(review): *value is overwritten even when the ioctl failed,
+     * clobbering the caller's input - callers must not rely on *value
+     * after a non-zero return. */
+    *value = control.value;
+    return rc;
+}
diff --git a/camera/QCamera/stack/mm-camera-interface/src/mm_camera_channel.c b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_channel.c
new file mode 100644
index 0000000..81f10e9
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_channel.c
@@ -0,0 +1,1537 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <semaphore.h>
+#include <linux/msm_ion.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+extern mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handler);
+extern mm_channel_t * mm_camera_util_get_channel_by_handler(
+ mm_camera_obj_t * cam_obj,
+ uint32_t handler);
+extern int32_t mm_camera_send_native_ctrl_cmd(mm_camera_obj_t * my_obj,
+ cam_ctrl_type type,
+ uint32_t length,
+ void *value);
+extern int32_t mm_camera_send_native_ctrl_timeout_cmd(mm_camera_obj_t * my_obj,
+ cam_ctrl_type type,
+ uint32_t length,
+ void *value,
+ int timeout);
+
+/* internal function declare goes here */
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+ mm_camera_buf_def_t *buf);
+int32_t mm_channel_init(mm_channel_t *my_obj);
+void mm_channel_release(mm_channel_t *my_obj);
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj,
+ mm_camera_buf_notify_t buf_cb, void *user_data,
+ uint32_t ext_image_mode, uint32_t sensor_idx);
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+ uint32_t stream_id);
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config);
+int32_t mm_channel_bundle_stream(mm_channel_t *my_obj,
+ mm_camera_buf_notify_t super_frame_notify_cb,
+ void *user_data,
+ mm_camera_bundle_attr_t *attr,
+ uint8_t num_streams,
+ uint32_t *stream_ids);
+int32_t mm_channel_destroy_bundle(mm_channel_t *my_obj);
+int32_t mm_channel_start_streams(mm_channel_t *my_obj,
+ uint8_t num_streams,
+ uint32_t *stream_ids);
+int32_t mm_channel_stop_streams(mm_channel_t *my_obj,
+ uint8_t num_streams,
+ uint32_t *stream_ids,
+ uint8_t tear_down_flag);
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj);
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj);
+int32_t mm_channel_start_focus(mm_channel_t *my_obj,
+ uint32_t sensor_idx,
+ uint32_t focus_mode);
+int32_t mm_channel_abort_focus(mm_channel_t *my_obj,
+ uint32_t sensor_idx);
+int32_t mm_channel_prepare_snapshot(mm_channel_t *my_obj,
+ uint32_t sensor_idx);
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+ uint32_t s_id,
+ void *value);
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+ uint32_t s_id,
+ void *value);
+uint8_t mm_channel_need_do_pp(mm_channel_t *my_obj,
+ mm_channel_queue_node_t *super_buf);
+int32_t mm_channel_do_post_processing(mm_channel_t *my_obj,
+ mm_channel_queue_node_t *super_buf);
+int32_t mm_channel_cancel_post_processing(mm_channel_t *my_obj);
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_skip(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue);
+
+/* state machine function declare */
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+
+/* channel super queue functions */
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue);
+int32_t mm_channel_superbuf_comp_and_enqueue(mm_channel_t *ch_obj,
+ mm_channel_queue_t * queue,
+ mm_camera_buf_info_t *buf);
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(mm_channel_queue_t * queue);
+
+/* Channel utility: look up a stream object inside ch_obj by its handle.
+ * Linear scan over the channel's stream slots; returns the matching
+ * mm_stream_t, or NULL if no slot has this handle. */
+mm_stream_t * mm_channel_util_get_stream_by_handler(
+ mm_channel_t * ch_obj,
+ uint32_t handler)
+{
+ int i;
+ mm_stream_t *s_obj = NULL;
+ /* NOTE(review): ch_idx is computed but never used below. */
+ uint8_t ch_idx = mm_camera_util_get_index_by_handler(handler);
+
+ for(i = 0; i < MM_CAMEAR_STRAEM_NUM_MAX; i++) {
+ if (handler == ch_obj->streams[i].my_hdl) {
+ s_obj = &ch_obj->streams[i];
+ break;
+ }
+ }
+ return s_obj;
+}
+
+/* CB for handling post processing result from ctrl evt.
+ * Registered on the camera event polling thread when a bundle needs
+ * post-processing; user_data carries the channel handle. On a WNR-done
+ * ctrl event it marks the corresponding buffer in the super-buf node as
+ * processed, and once all buffers are done either dispatches the super
+ * buffer to the client callback or returns the buffers via qbuf. */
+static void mm_channel_pp_result_notify(uint32_t camera_handler,
+ mm_camera_event_t *evt,
+ void *user_data)
+{
+ uint32_t ch_hdl = (uint32_t)user_data;
+ mm_camera_obj_t *cam_obj = NULL;
+ mm_channel_t *ch_obj = NULL;
+ mm_channel_queue_node_t * node = NULL;
+ mm_channel_pp_info_t* pp_info = NULL;
+
+ cam_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+ if (NULL == cam_obj) {
+ CDBG("%s: No matching camera handler", __func__);
+ return;
+ }
+
+ ch_obj = mm_camera_util_get_channel_by_handler(cam_obj, ch_hdl);
+ if (NULL == ch_obj) {
+ CDBG("%s: No matching channel handler", __func__);
+ return;
+ }
+
+ pthread_mutex_lock(&ch_obj->ch_lock);
+ if (ch_obj->pending_pp_cnt > 0) {
+ if (MM_CAMERA_EVT_TYPE_CTRL == evt->event_type) {
+ /* PP result is sent as CTRL event
+ * currently we only have WNR */
+ if (MM_CAMERA_CTRL_EVT_WDN_DONE == evt->e.ctrl.evt) {
+ /* WNR result */
+ pp_info = (mm_channel_pp_info_t *)evt->e.ctrl.cookie;
+ if (NULL != pp_info) {
+ node = pp_info->super_buf;
+ }
+ }
+ }
+
+ if (NULL != node) {
+ uint8_t pp_done = 1;
+ if (CAM_CTRL_SUCCESS == evt->e.ctrl.status) {
+ uint8_t i;
+ /* 1) update need_pp flag as action done */
+ for (i=0; i<node->num_of_bufs; i++) {
+ if (node->super_buf[i].stream_id == pp_info->stream_hdl) {
+ /* stream pp is done, set the flag to 0*/
+ node->super_buf[i].need_pp = 0;
+ break;
+ }
+ }
+
+ /* 2) check if all bufs done with pp */
+ for (i=0; i<node->num_of_bufs; i++) {
+ if (node->super_buf[i].need_pp) {
+ pp_done = 0;
+ break;
+ }
+ }
+ }
+
+ /* On pp failure (status != CAM_CTRL_SUCCESS) pp_done stays 1,
+  * so the node is still dispatched/recycled below. */
+ if (pp_done) {
+ /* send super buf to CB */
+ if (NULL != ch_obj->bundle.super_buf_notify_cb) {
+ mm_camera_super_buf_t super_buf;
+ uint8_t i;
+
+ memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+ super_buf.num_bufs = node->num_of_bufs;
+ for (i=0; i<node->num_of_bufs; i++) {
+ super_buf.bufs[i] = node->super_buf[i].buf;
+ }
+ super_buf.camera_handle = ch_obj->cam_obj->my_hdl;
+ super_buf.ch_id = ch_obj->my_hdl;
+
+ ch_obj->bundle.super_buf_notify_cb(&super_buf,
+ ch_obj->bundle.user_data);
+ /* NOTE(review): pending_pp_cnt is decremented only on this
+  * branch; the no-callback branch below leaves it unchanged —
+  * confirm that is intended. */
+ ch_obj->pending_pp_cnt--;
+ } else {
+ /* buf done with the nonuse super buf */
+ uint8_t i;
+ for (i=0; i<node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ }
+
+ /* done with node, free it */
+ free(node);
+ }
+ }
+ }
+ pthread_mutex_unlock(&ch_obj->ch_lock);
+}
+
+/* CB for processing stream buffer (runs on the channel cmd thread).
+ * On a data CB, matches the incoming frame into the bundle's super-buf
+ * queue. Then, while either a burst request is pending (pending_cnt > 0)
+ * or notify mode is continuous, dequeues complete super buffers: each is
+ * either handed to post-processing (node kept alive as the pp cookie) or
+ * dispatched to the client callback / recycled via qbuf and freed. */
+static void mm_channel_process_stream_buf(mm_camera_cmdcb_t * cmd_cb,
+ void *user_data)
+{
+ mm_camera_super_buf_notify_mode_t notify_mode;
+ mm_channel_queue_node_t *node = NULL;
+ mm_channel_t *ch_obj = (mm_channel_t *)user_data;
+ if (NULL == ch_obj) {
+ return;
+ }
+
+ if (MM_CAMERA_CMD_TYPE_DATA_CB == cmd_cb->cmd_type) {
+ /* comp_and_enqueue */
+ mm_channel_superbuf_comp_and_enqueue(
+ ch_obj,
+ &ch_obj->bundle.superbuf_queue,
+ &cmd_cb->u.buf);
+ }
+
+ notify_mode = ch_obj->bundle.superbuf_queue.attr.notify_mode;
+
+ /* bufdone for overflowed bufs */
+ mm_channel_superbuf_bufdone_overflow(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+ /* lock ch_lock */
+ pthread_mutex_lock(&ch_obj->ch_lock);
+
+ /* dispatch frame if pending_cnt>0 or is in continuous streaming mode */
+ while ( (ch_obj->pending_cnt > 0) ||
+ (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == notify_mode) ) {
+
+ /* skip frames if needed */
+ mm_channel_superbuf_skip(ch_obj, &ch_obj->bundle.superbuf_queue);
+
+ /* dequeue */
+ node = mm_channel_superbuf_dequeue(&ch_obj->bundle.superbuf_queue);
+ if (NULL != node) {
+ /* decrease pending_cnt */
+ CDBG("%s: Super Buffer received, Call client callback",__func__);
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_BURST == notify_mode) {
+ ch_obj->pending_cnt--;
+ }
+
+ if (mm_channel_need_do_pp(ch_obj, node)) {
+ /* do post processing */
+ ch_obj->pending_pp_cnt++;
+ mm_channel_do_post_processing(ch_obj, node);
+ /* no need to free node here
+ * node will be as cookie sent to backend doing pp */
+ } else {
+ /* dispatch superbuf */
+ if (NULL != ch_obj->bundle.super_buf_notify_cb) {
+ mm_camera_super_buf_t super_buf;
+ uint8_t i;
+
+ memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+ super_buf.num_bufs = node->num_of_bufs;
+ for (i=0; i<node->num_of_bufs; i++) {
+ super_buf.bufs[i] = node->super_buf[i].buf;
+ }
+ super_buf.camera_handle = ch_obj->cam_obj->my_hdl;
+ super_buf.ch_id = ch_obj->my_hdl;
+
+ ch_obj->bundle.super_buf_notify_cb(&super_buf,
+ ch_obj->bundle.user_data);
+ } else {
+ /* buf done with the nonuse super buf */
+ uint8_t i;
+ for (i=0; i<node->num_of_bufs; i++) {
+ mm_channel_qbuf(ch_obj, node->super_buf[i].buf);
+ }
+ }
+ free(node);
+ }
+ } else {
+ /* no superbuf avail, break the loop */
+ CDBG_ERROR("%s : Superbuffer not available",__func__);
+ break;
+ }
+ }
+
+ /* unlock ch_lock */
+ pthread_mutex_unlock(&ch_obj->ch_lock);
+}
+
+/* state machine entry.
+ * Dispatches evt to the handler for the channel's current state and
+ * returns that handler's rc (-1 for an unknown state). */
+int32_t mm_channel_fsm_fn(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ CDBG("%s : E state = %d",__func__,my_obj->state);
+ switch (my_obj->state) {
+ case MM_CHANNEL_STATE_NOTUSED:
+ rc = mm_channel_fsm_fn_notused(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_STOPPED:
+ rc = mm_channel_fsm_fn_stopped(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_ACTIVE:
+ rc = mm_channel_fsm_fn_active(my_obj, evt, in_val, out_val);
+ break;
+ case MM_CHANNEL_STATE_PAUSED:
+ rc = mm_channel_fsm_fn_paused(my_obj, evt, in_val, out_val);
+ break;
+ default:
+ CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
+ break;
+ }
+
+ /* unlock ch_lock */
+ /* NOTE(review): ch_lock is released here but never acquired in this
+  * function — presumably the caller locks it before invoking the FSM;
+  * verify at the call sites. */
+ pthread_mutex_unlock(&my_obj->ch_lock);
+ CDBG("%s : X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_CHANNEL_STATE_NOTUSED.
+ * No event is valid in this state; every event is logged as an error
+ * and rejected with -1. */
+int32_t mm_channel_fsm_fn_notused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ switch (evt) {
+ default:
+ CDBG_ERROR("%s: invalid state (%d) for evt (%d)",
+ __func__, my_obj->state, evt);
+ break;
+ }
+
+ return rc;
+}
+
+/* FSM handler for MM_CHANNEL_STATE_STOPPED.
+ * Streams may be added/deleted/configured/bundled here; a successful
+ * START_STREAM transitions the channel to ACTIVE. Returns the handler rc
+ * (-1 for events invalid in this state). */
+int32_t mm_channel_fsm_fn_stopped(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+ CDBG("%s : E evt = %d",__func__,evt);
+ switch (evt) {
+ case MM_CHANNEL_EVT_ADD_STREAM:
+ {
+ uint32_t s_hdl = 0;
+ mm_evt_paylod_add_stream_t *payload =
+ (mm_evt_paylod_add_stream_t *)in_val;
+ s_hdl = mm_channel_add_stream(my_obj,
+ payload->buf_cb,
+ payload->user_data,
+ payload->ext_image_mode,
+ payload->sensor_idx);
+ /* out_val returns the new stream handle (0 on failure). */
+ *((uint32_t*)out_val) = s_hdl;
+ rc = 0;
+ }
+ break;
+ case MM_CHANNEL_EVT_DEL_STREAM:
+ {
+ uint32_t s_id = *((uint32_t *)in_val);
+ rc = mm_channel_del_stream(my_obj, s_id);
+ }
+ break;
+ case MM_CHANNEL_EVT_START_STREAM:
+ {
+ mm_evt_payload_start_stream_t *payload =
+ (mm_evt_payload_start_stream_t *)in_val;
+ rc = mm_channel_start_streams(my_obj,
+ payload->num_streams,
+ payload->stream_ids);
+ /* first stream started in stopped state
+ * move to active state */
+ if (0 == rc) {
+ my_obj->state = MM_CHANNEL_STATE_ACTIVE;
+ }
+ }
+ break;
+ case MM_CHANNEL_EVT_CONFIG_STREAM:
+ {
+ mm_evt_paylod_config_stream_t *payload =
+ (mm_evt_paylod_config_stream_t *)in_val;
+ rc = mm_channel_config_stream(my_obj,
+ payload->stream_id,
+ payload->config);
+ }
+ break;
+ case MM_CHANNEL_EVT_INIT_BUNDLE:
+ {
+ mm_evt_payload_bundle_stream_t *payload =
+ (mm_evt_payload_bundle_stream_t *)in_val;
+ rc = mm_channel_bundle_stream(my_obj,
+ payload->super_frame_notify_cb,
+ payload->user_data,
+ payload->attr,
+ payload->num_streams,
+ payload->stream_ids);
+ }
+ break;
+ case MM_CHANNEL_EVT_DESTROY_BUNDLE:
+ rc = mm_channel_destroy_bundle(my_obj);
+ break;
+ case MM_CHANNEL_EVT_PREPARE_SNAPSHOT:
+ {
+ /* in_val carries the sensor index by value, not by pointer. */
+ uint32_t sensor_idx = (uint32_t)in_val;
+ rc = mm_channel_prepare_snapshot(my_obj, sensor_idx);
+ }
+ break;
+ case MM_CHANNEL_EVT_DELETE:
+ mm_channel_release(my_obj);
+ rc = 0;
+ break;
+ case MM_CHANNEL_EVT_SET_STREAM_PARM:
+ {
+ /* NOTE(review): unlike DEL_STREAM (which dereferences in_val),
+  * the stream id is passed by value here and the parm value rides
+  * in out_val — confirm against the event dispatcher. */
+ uint32_t s_id = (uint32_t)in_val;
+ rc = mm_channel_set_stream_parm(my_obj, s_id, out_val);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_PARM:
+ {
+ uint32_t s_id = (uint32_t)in_val;
+ rc = mm_channel_get_stream_parm(my_obj, s_id, out_val);
+ }
+ break;
+ default:
+ CDBG_ERROR("%s: invalid state (%d) for evt (%d)",
+ __func__, my_obj->state, evt);
+ break;
+ }
+ /* NOTE(review): exit log says "E" (entry) — likely meant "X". */
+ CDBG("%s : E rc = %d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_CHANNEL_STATE_ACTIVE.
+ * Handles stream start/stop/config, bundle setup/teardown, super-buf
+ * requests, focus and snapshot-prepare commands. When a STOP/TEARDOWN
+ * leaves no stream in an active state, the channel transitions back to
+ * STOPPED. Returns the handler rc (-1 for invalid events). */
+int32_t mm_channel_fsm_fn_active(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ CDBG("%s : E evt = %d",__func__,evt);
+ switch (evt) {
+ case MM_CHANNEL_EVT_CONFIG_STREAM:
+ {
+ mm_evt_paylod_config_stream_t *payload =
+ (mm_evt_paylod_config_stream_t *)in_val;
+ rc = mm_channel_config_stream(my_obj,
+ payload->stream_id,
+ payload->config);
+ }
+ break;
+ case MM_CHANNEL_EVT_START_STREAM:
+ {
+ mm_evt_payload_start_stream_t *payload =
+ (mm_evt_payload_start_stream_t *)in_val;
+ rc = mm_channel_start_streams(my_obj,
+ payload->num_streams,
+ payload->stream_ids);
+ }
+ break;
+ case MM_CHANNEL_EVT_STOP_STREAM:
+ case MM_CHANNEL_EVT_TEARDOWN_STREAM:
+ {
+ int i;
+ /* TEARDOWN additionally releases the streams after stopping. */
+ uint8_t tear_down_flag = (MM_CHANNEL_EVT_TEARDOWN_STREAM == evt)? 1:0;
+ uint8_t all_stopped = 1;
+ mm_evt_payload_stop_stream_t *payload =
+ (mm_evt_payload_stop_stream_t *)in_val;
+
+ rc = mm_channel_stop_streams(my_obj,
+ payload->num_streams,
+ payload->stream_ids,
+ tear_down_flag);
+
+ /* check if all streams are stopped
+ * then we move to stopped state */
+
+ for (i=0; i<MM_CAMEAR_STRAEM_NUM_MAX; i++) {
+ if (MM_STREAM_STATE_ACTIVE_STREAM_ON == my_obj->streams[i].state ||
+ MM_STREAM_STATE_ACTIVE_STREAM_OFF == my_obj->streams[i].state) {
+ all_stopped = 0;
+ break;
+ }
+ }
+ if (all_stopped) {
+ my_obj->state = MM_CHANNEL_STATE_STOPPED;
+ }
+
+ }
+ break;
+ case MM_CHANNEL_EVT_INIT_BUNDLE:
+ {
+ mm_evt_payload_bundle_stream_t *payload =
+ (mm_evt_payload_bundle_stream_t *)in_val;
+ rc = mm_channel_bundle_stream(my_obj,
+ payload->super_frame_notify_cb,
+ payload->user_data,
+ payload->attr,
+ payload->num_streams,
+ payload->stream_ids);
+ }
+ break;
+ case MM_CHANNEL_EVT_DESTROY_BUNDLE:
+ rc = mm_channel_destroy_bundle(my_obj);
+ break;
+ case MM_CHANNEL_EVT_REQUEST_SUPER_BUF:
+ rc = mm_channel_request_super_buf(my_obj);
+ break;
+ case MM_CHANNEL_EVT_CANCEL_REQUEST_SUPER_BUF:
+ rc = mm_channel_cancel_super_buf_request(my_obj);
+ break;
+ case MM_CHANNEL_EVT_START_FOCUS:
+ {
+ mm_evt_payload_start_focus_t* payload =
+ (mm_evt_payload_start_focus_t *)in_val;
+ rc = mm_channel_start_focus(my_obj,
+ payload->sensor_idx,
+ payload->focus_mode);
+ }
+ break;
+ case MM_CHANNEL_EVT_ABORT_FOCUS:
+ {
+ /* in_val carries the sensor index by value, not by pointer. */
+ uint32_t sensor_idx = (uint32_t)in_val;
+ rc = mm_channel_abort_focus(my_obj, sensor_idx);
+ }
+ break;
+ case MM_CHANNEL_EVT_PREPARE_SNAPSHOT:
+ {
+ uint32_t sensor_idx = (uint32_t)in_val;
+ rc = mm_channel_prepare_snapshot(my_obj, sensor_idx);
+ }
+ break;
+ case MM_CHANNEL_EVT_SET_STREAM_PARM:
+ {
+ uint32_t s_id = (uint32_t)in_val;
+ rc = mm_channel_set_stream_parm(my_obj, s_id, out_val);
+ }
+ break;
+ case MM_CHANNEL_EVT_GET_STREAM_PARM:
+ {
+ uint32_t s_id = (uint32_t)in_val;
+ rc = mm_channel_get_stream_parm(my_obj, s_id, out_val);
+ }
+ break;
+
+ case MM_CHANNEL_EVT_DEL_STREAM:
+ {
+ uint32_t s_id = *((uint32_t *)in_val);
+ rc = mm_channel_del_stream(my_obj, s_id);
+ }
+ break;
+ default:
+ CDBG_ERROR("%s: invalid state (%d) for evt (%d)",
+ __func__, my_obj->state, evt);
+ break;
+ }
+ CDBG("%s : X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_CHANNEL_STATE_PAUSED.
+ * Pause/resume is not implemented; every event is rejected with -1. */
+int32_t mm_channel_fsm_fn_paused(mm_channel_t *my_obj,
+ mm_channel_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ /* currently we are not supporting pause/resume channel */
+ CDBG_ERROR("%s: evt (%d) not supported in state (%d)",
+ __func__, evt, my_obj->state);
+
+ return rc;
+}
+
+/* Initialize a channel: launch its data poll thread and move the
+ * channel to the STOPPED state. Always returns 0. */
+int32_t mm_channel_init(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ CDBG("%s : Launch data poll thread in channel open",__func__);
+ /* NOTE(review): launch result is not checked. */
+ mm_camera_poll_thread_launch(&my_obj->poll_thread[0],
+ MM_CAMERA_POLL_TYPE_CH);
+
+ /* change state to stopped state */
+ my_obj->state = MM_CHANNEL_STATE_STOPPED;
+ return rc;
+}
+
+/* Release a channel: stop its data poll thread and mark the channel
+ * slot NOTUSED so it can be reused. */
+void mm_channel_release(mm_channel_t *my_obj)
+{
+ /* stop data poll thread */
+ mm_camera_poll_thread_release(&my_obj->poll_thread[0]);
+
+ /* change state to notused state */
+ my_obj->state = MM_CHANNEL_STATE_NOTUSED;
+}
+
+/* Add a stream to the channel.
+ * Finds a free stream slot, initializes the stream object (data CB at
+ * buf_cb slot 0, infinite callback count), and acquires it through the
+ * stream FSM. Returns the new stream handle, or 0 when no slot is free
+ * or the acquire fails. */
+uint32_t mm_channel_add_stream(mm_channel_t *my_obj,
+ mm_camera_buf_notify_t buf_cb, void *user_data,
+ uint32_t ext_image_mode, uint32_t sensor_idx)
+{
+ int32_t rc = 0;
+ uint8_t idx = 0;
+ uint32_t s_hdl = 0;
+ mm_stream_t *stream_obj = NULL;
+
+ CDBG("%s : image mode = %d",__func__,ext_image_mode);
+ /* check available stream */
+ for (idx = 0; idx < MM_CAMEAR_STRAEM_NUM_MAX; idx++) {
+ if (MM_STREAM_STATE_NOTUSED == my_obj->streams[idx].state) {
+ stream_obj = &my_obj->streams[idx];
+ break;
+ }
+ }
+ if (NULL == stream_obj) {
+ CDBG_ERROR("%s: streams reach max, no more stream allowed to add", __func__);
+ return s_hdl;
+ }
+
+ /* initialize stream object */
+ memset(stream_obj, 0, sizeof(mm_stream_t));
+ stream_obj->my_hdl = mm_camera_util_generate_handler(idx);
+ stream_obj->ch_obj = my_obj;
+ /* cd through intf always palced at idx 0 of buf_cb */
+ stream_obj->buf_cb[0].cb = buf_cb;
+ stream_obj->buf_cb[0].user_data = user_data;
+ stream_obj->buf_cb[0].cb_count = -1; /* infinite by default */
+ /* NOTE(review): stored ext_image_mode is offset by +1 from the
+  * caller's value — presumably to reserve 0 as "unset"; confirm every
+  * consumer (e.g. mm_channel_reg_stream_cb) compares consistently. */
+ stream_obj->ext_image_mode = ext_image_mode + 1;
+ stream_obj->sensor_idx = sensor_idx;
+ stream_obj->fd = -1;
+ pthread_mutex_init(&stream_obj->buf_lock, NULL);
+ pthread_mutex_init(&stream_obj->cb_lock, NULL);
+ stream_obj->state = MM_STREAM_STATE_INITED;
+
+ /* acquire stream */
+ rc = mm_stream_fsm_fn(stream_obj, MM_STREAM_EVT_ACQUIRE, NULL, NULL);
+ if (0 == rc) {
+ s_hdl = stream_obj->my_hdl;
+ } else {
+ /* error during acquire, de-init */
+ pthread_mutex_destroy(&stream_obj->buf_lock);
+ pthread_mutex_destroy(&stream_obj->cb_lock);
+ memset(stream_obj, 0, sizeof(mm_stream_t));
+ }
+ CDBG("%s : stream handle = %d",__func__,s_hdl);
+ return s_hdl;
+}
+
+/* Delete a stream from the channel by handle.
+ * Sends MM_STREAM_EVT_RELEASE through the stream FSM.
+ * Returns the FSM rc, or -1 if the handle matches no stream. */
+int32_t mm_channel_del_stream(mm_channel_t *my_obj,
+ uint32_t stream_id)
+{
+ int rc = -1;
+ mm_stream_t * stream_obj = NULL;
+ stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+ if (NULL == stream_obj) {
+ CDBG_ERROR("%s :Invalid Stream Object",__func__);
+ return rc;
+ }
+
+ rc = mm_stream_fsm_fn(stream_obj,
+ MM_STREAM_EVT_RELEASE,
+ NULL,
+ NULL);
+
+ return rc;
+}
+
+/* Configure a stream's format by handle.
+ * Forwards config to the stream FSM via MM_STREAM_EVT_SET_FMT.
+ * Returns the FSM rc, or -1 if the handle matches no stream. */
+int32_t mm_channel_config_stream(mm_channel_t *my_obj,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config)
+{
+ int rc = -1;
+ mm_stream_t * stream_obj = NULL;
+ CDBG("%s : E stream ID = %d",__func__,stream_id);
+ stream_obj = mm_channel_util_get_stream_by_handler(my_obj, stream_id);
+
+ if (NULL == stream_obj) {
+ CDBG_ERROR("%s : X rc = %d",__func__,rc);
+ return rc;
+ }
+
+ /* set stream fmt */
+ rc = mm_stream_fsm_fn(stream_obj,
+ MM_STREAM_EVT_SET_FMT,
+ (void *)config,
+ NULL);
+ CDBG("%s : X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* Bundle a set of streams so their frames are matched into super buffers.
+ * Validates all stream handles, initializes the super-buf queue, records
+ * the bundle attributes/callback, marks each stream bundled, launches the
+ * cmd thread that dispatches super buffers, and — if the camera needs
+ * post-processing — registers the pp result callback on the ctrl event
+ * path. Returns 0 on success, -1 if any stream handle is invalid. */
+int32_t mm_channel_bundle_stream(mm_channel_t *my_obj,
+ mm_camera_buf_notify_t super_frame_notify_cb,
+ void *user_data,
+ mm_camera_bundle_attr_t *attr,
+ uint8_t num_streams,
+ uint32_t *stream_ids)
+{
+ int32_t rc = 0;
+ int i;
+ mm_stream_t* s_objs[MM_CAMEAR_MAX_STRAEM_BUNDLE] = {NULL};
+
+ /* first check if all streams to be bundled are valid */
+ for (i=0; i < num_streams; i++) {
+ s_objs[i] = mm_channel_util_get_stream_by_handler(my_obj, stream_ids[i]);
+ if (NULL == s_objs[i]) {
+ CDBG_ERROR("%s: invalid stream handler %d (idx=%d) to be bundled",
+ __func__, stream_ids[i], i);
+ return -1;
+ }
+ }
+
+ /* init superbuf queue */
+ mm_channel_superbuf_queue_init(&my_obj->bundle.superbuf_queue);
+
+ /* save bundle config */
+ memcpy(&my_obj->bundle.superbuf_queue.attr, attr, sizeof(mm_camera_bundle_attr_t));
+ my_obj->bundle.super_buf_notify_cb = super_frame_notify_cb;
+ my_obj->bundle.user_data = user_data;
+ my_obj->bundle.superbuf_queue.num_streams = num_streams;
+ my_obj->bundle.superbuf_queue.expected_frame_id = 0;
+
+ for (i=0; i < num_streams; i++) {
+ /* set bundled flag to streams */
+ s_objs[i]->is_bundled = 1;
+ /* init bundled streams to invalid value -1 */
+ //my_obj->bundle.superbuf_queue.bundled_streams[i] = -1;
+ my_obj->bundle.superbuf_queue.bundled_streams[i] = stream_ids[i];
+ }
+
+ /* launch cmd thread for super buf dataCB */
+ mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+ mm_channel_process_stream_buf,
+ (void*)my_obj);
+
+ /* check if we need to do post-processing */
+ if (my_obj->cam_obj->need_pp) {
+ /* register postProcessingCB at camera evt polling thread
+ * because pp result is coming from ctrl evt */
+ mm_camera_register_event_notify_internal(my_obj->cam_obj,
+ mm_channel_pp_result_notify,
+ (void *)my_obj->my_hdl,
+ MM_CAMERA_EVT_TYPE_CTRL);
+ }
+
+ return rc;
+}
+
+/* bundled streams must all be stopped before bundle can be destroyed.
+ * Tears the bundle down: stops the cmd thread, deinits the super-buf
+ * queue and clears all bundle bookkeeping. Always returns 0. */
+int32_t mm_channel_destroy_bundle(mm_channel_t *my_obj)
+{
+
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+
+ /* deinit superbuf queue */
+ mm_channel_superbuf_queue_deinit(&my_obj->bundle.superbuf_queue);
+
+ /* memset bundle info */
+ memset(&my_obj->bundle, 0, sizeof(mm_channel_bundle_t));
+ return 0;
+}
+
+/* Start the given streams.
+ * If any requested stream belongs to the bundle, the remaining bundled
+ * streams are pulled into the start list so the bundle starts together.
+ * Per stream the sequence is: allocate buffers, register buffers, mark
+ * pp-needed (main-image streams only, when the camera supports pp), then
+ * stream on. On any failure, already-started streams are rolled back.
+ * Returns 0 on success, non-zero on the first failure. */
+int32_t mm_channel_start_streams(mm_channel_t *my_obj,
+ uint8_t num_streams,
+ uint32_t *stream_ids)
+{
+ int32_t rc = 0;
+ int i, j;
+ mm_stream_t* s_objs[MM_CAMEAR_MAX_STRAEM_BUNDLE] = {NULL};
+ uint8_t num_streams_to_start = num_streams;
+ uint32_t streams_to_start[MM_CAMEAR_MAX_STRAEM_BUNDLE];
+ uint8_t bundle_to_start = 0;
+
+ /* check if any bundled stream to be start,
+ * then all bundled stream should be started */
+ memcpy(streams_to_start, stream_ids, sizeof(uint32_t) * num_streams);
+ for (i=0; i < num_streams; i++) {
+ for (j=0; j < my_obj->bundle.superbuf_queue.num_streams; j++) {
+ if (stream_ids[i] == my_obj->bundle.superbuf_queue.bundled_streams[j]) {
+ bundle_to_start = 1;
+ break;
+ }
+ }
+ }
+
+ if (bundle_to_start) {
+ uint8_t need_add;
+ /* add bundled streams into the start list if not already added*/
+ for (i=0; i<my_obj->bundle.superbuf_queue.num_streams; i++) {
+ need_add = 1;
+ for (j=0; j<num_streams; j++) {
+ if (stream_ids[j] == my_obj->bundle.superbuf_queue.bundled_streams[i]) {
+ need_add = 0;
+ break;
+ }
+ }
+ if (need_add) {
+ streams_to_start[num_streams_to_start++] =
+ my_obj->bundle.superbuf_queue.bundled_streams[i];
+ }
+ }
+ }
+
+ /* check if all streams to be started are valid */
+ for (i=0; i<num_streams_to_start; i++) {
+ s_objs[i] = mm_channel_util_get_stream_by_handler(my_obj, streams_to_start[i]);
+
+ if (NULL == s_objs[i]) {
+ CDBG_ERROR("%s: invalid stream handler %d (idx=%d) to be started",
+ __func__, streams_to_start[i], i);
+ return -1;
+ }
+ }
+
+ for (i=0; i<num_streams_to_start; i++) {
+ /* allocate buf */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_GET_BUF,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ CDBG_ERROR("%s: get buf failed at idx(%d)", __func__, i);
+ break;
+ }
+
+ /* reg buf */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_REG_BUF,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ CDBG_ERROR("%s: reg buf failed at idx(%d)", __func__, i);
+ break;
+ }
+
+ /* TODO */
+ /* for now, hard-coded to 1 for main image stream */
+ /* Query if stream need to do pp under current hardware configuration,
+ * when camera has offline pp capability */
+ if (my_obj->cam_obj->need_pp) {
+ if (MSM_V4L2_EXT_CAPTURE_MODE_MAIN == s_objs[i]->ext_image_mode) {
+ s_objs[i]->is_pp_needed = 1;
+ } else {
+ s_objs[i]->is_pp_needed = 0;
+ }
+ }
+
+ /* start stream */
+ rc = mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_START,
+ NULL,
+ NULL);
+ if (0 != rc) {
+ CDBG_ERROR("%s: start stream failed at idx(%d)", __func__, i);
+ break;
+ }
+ }
+
+ /* error handling */
+ if (0 != rc) {
+ /* NOTE(review): this rollback indexes s_objs[i] (the failed stream)
+  * on every iteration instead of s_objs[j], so previously started
+  * streams are never actually stopped/unregistered — likely a bug. */
+ for (j=0; j<=i; j++) {
+ /* stop streams*/
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_STOP,
+ NULL,
+ NULL);
+
+ /* unreg buf */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_UNREG_BUF,
+ NULL,
+ NULL);
+
+ /* put buf back */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_PUT_BUF,
+ NULL,
+ NULL);
+ }
+ }
+
+ return rc;
+}
+
+/* Required: bundled streams need to be stopped together.
+ * Stops the given streams, expanding the list with any other streams in
+ * the same bundle. Two passes: (1) stream-off + unregister buffers for
+ * every stream, (2) once all are off, release the buffers, and — when
+ * tear_down_flag is set — release each stream entirely.
+ * Per-stream FSM errors are not propagated; always returns 0. */
+int32_t mm_channel_stop_streams(mm_channel_t *my_obj,
+ uint8_t num_streams,
+ uint32_t *stream_ids,
+ uint8_t tear_down_flag)
+{
+ int32_t rc = 0;
+ int i, j;
+ mm_stream_t* s_objs[MM_CAMEAR_MAX_STRAEM_BUNDLE] = {NULL};
+ uint8_t num_streams_to_stop = num_streams;
+ uint32_t streams_to_stop[MM_CAMEAR_MAX_STRAEM_BUNDLE];
+ uint8_t bundle_to_stop = 0;
+
+ /* make sure bundled streams are stopped together */
+ memcpy(streams_to_stop, stream_ids, sizeof(uint32_t) * num_streams);
+ for (i=0; i<num_streams; i++) {
+ for (j=0; j<my_obj->bundle.superbuf_queue.num_streams; j++) {
+ if (stream_ids[i] == my_obj->bundle.superbuf_queue.bundled_streams[j]) {
+ bundle_to_stop = 1;
+ break;
+ }
+ }
+ }
+ if (bundle_to_stop) {
+ uint8_t need_add;
+ /* add bundled streams into the start list if not already added*/
+ for (i=0; i<my_obj->bundle.superbuf_queue.num_streams; i++) {
+ need_add = 1;
+ for (j=0; j<num_streams; j++) {
+ if (stream_ids[j] == my_obj->bundle.superbuf_queue.bundled_streams[i]) {
+ need_add = 0;
+ break;
+ }
+ }
+ if (need_add) {
+ streams_to_stop[num_streams_to_stop++] =
+ my_obj->bundle.superbuf_queue.bundled_streams[i];
+ }
+ }
+ }
+
+ for (i=0; i<num_streams_to_stop; i++) {
+ s_objs[i] = mm_channel_util_get_stream_by_handler(my_obj, streams_to_stop[i]);
+
+ if (NULL != s_objs[i]) {
+ /* stream off */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_STOP,
+ NULL,
+ NULL);
+
+ /* unreg buf at kernel */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_UNREG_BUF,
+ NULL,
+ NULL);
+ }
+ }
+
+ /* since all streams are stopped, we are safe to
+ * release all buffers allocated in stream */
+ for (i=0; i<num_streams_to_stop; i++) {
+ if (NULL != s_objs[i]) {
+ /* put buf back */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_PUT_BUF,
+ NULL,
+ NULL);
+
+ if (tear_down_flag) {
+ /* to tear down stream totally, stream needs to be released */
+ mm_stream_fsm_fn(s_objs[i],
+ MM_STREAM_EVT_RELEASE,
+ NULL,
+ NULL);
+ }
+ }
+ }
+
+ return rc;
+}
+
+int32_t mm_channel_request_super_buf(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ mm_camera_cmdcb_t* node = NULL;
+
+ /* set pending_cnt
+ * will trigger dispatching super frames if pending_cnt > 0 */
+ my_obj->pending_cnt = my_obj->bundle.superbuf_queue.attr.burst_num;
+
+ /* send sem_post to wake up cmd thread to dispatch super buffer */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_REQ_DATA_CB;
+
+ /* enqueue to cmd thread */
+ mm_camera_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+ rc = -1;
+ }
+
+ return rc;
+}
+
+/* Cancel an outstanding super-buf burst request.
+ * Clears pending_cnt, and if any post-processing is in flight asks the
+ * backend to cancel it before clearing pending_pp_cnt.
+ * Returns the cancel-pp rc (0 when nothing was pending). */
+int32_t mm_channel_cancel_super_buf_request(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ /* reset pending_cnt */
+ my_obj->pending_cnt = 0;
+
+ if (my_obj->pending_pp_cnt > 0) {
+ rc = mm_channel_cancel_post_processing(my_obj);
+ }
+ my_obj->pending_pp_cnt = 0;
+ return rc;
+}
+
+/* Return a buffer to its stream (queue it back to the kernel) via the
+ * stream FSM's QBUF event. Returns the FSM rc, or -1 if the buffer's
+ * stream handle matches no stream in this channel. */
+int32_t mm_channel_qbuf(mm_channel_t *my_obj,
+ mm_camera_buf_def_t *buf)
+{
+ int32_t rc = -1;
+ struct ion_flush_data cache_inv_data;
+ int ion_fd;
+ /* NOTE(review): cache_frame is never assigned anywhere in this
+  * function, yet the USE_ION block below dereferences it — under
+  * USE_ION this reads an uninitialized pointer. The whole ION cache-
+  * invalidate section looks like dead/incomplete code; confirm whether
+  * USE_ION is ever defined for this build (BoardConfig). */
+ struct msm_frame *cache_frame;
+ struct msm_frame *cache_frame1 = NULL;
+
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, buf->stream_id);
+
+ if (NULL != s_obj) {
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_QBUF,
+ (void *)buf,
+ NULL);
+ }
+
+#ifdef USE_ION
+ cache_inv_data.vaddr = cache_frame->buffer;
+ cache_inv_data.fd = cache_frame->fd;
+ cache_inv_data.handle = cache_frame->fd_data.handle;
+ cache_inv_data.length = cache_frame->ion_alloc.len;
+ ion_fd = cache_frame->ion_dev_fd;
+ if(ion_fd > 0) {
+ if(ioctl(ion_fd, ION_IOC_INV_CACHES, &cache_inv_data) < 0)
+ CDBG_ERROR("%s: Cache Invalidate failed\n", __func__);
+ else {
+ CDBG("%s: Successful cache invalidate\n", __func__);
+ if(cache_frame1) {
+ ion_fd = cache_frame1->ion_dev_fd;
+ cache_inv_data.vaddr = cache_frame1->buffer;
+ cache_inv_data.fd = cache_frame1->fd;
+ cache_inv_data.handle = cache_frame1->fd_data.handle;
+ cache_inv_data.length = cache_frame1->ion_alloc.len;
+ if(ioctl(ion_fd, ION_IOC_INV_CACHES, &cache_inv_data) < 0)
+ CDBG_ERROR("%s: Cache Invalidate failed\n", __func__);
+ else
+ CDBG("%s: Successful cache invalidate\n", __func__);
+ }
+ }
+ }
+#endif
+
+ return rc;
+}
+
+/* Kick off autofocus via a native ctrl command.
+ * NOTE(review): sensor_idx is unused, and focus_mode is smuggled through
+ * the void* value argument by value (not by address) despite the
+ * sizeof(uint32_t) length — confirm the native-ctrl path expects this. */
+int32_t mm_channel_start_focus(mm_channel_t *my_obj,
+ uint32_t sensor_idx,
+ uint32_t focus_mode)
+{
+ return mm_camera_send_native_ctrl_cmd(my_obj->cam_obj,
+ CAMERA_SET_PARM_AUTO_FOCUS,
+ sizeof(uint32_t), (void*)focus_mode);
+}
+
+/* Cancel an in-progress autofocus via a native ctrl command
+ * (sensor_idx is currently unused). */
+int32_t mm_channel_abort_focus(mm_channel_t *my_obj, uint32_t sensor_idx)
+{
+ return mm_camera_send_native_ctrl_cmd(my_obj->cam_obj,
+ CAMERA_AUTO_FOCUS_CANCEL,
+ 0, NULL);
+}
+
+/* Ask the backend to prepare for snapshot, waiting up to 2000 ms for
+ * completion (sensor_idx is currently unused). */
+int32_t mm_channel_prepare_snapshot(mm_channel_t *my_obj, uint32_t sensor_idx)
+{
+ return mm_camera_send_native_ctrl_timeout_cmd(my_obj->cam_obj,
+ CAMERA_PREPARE_SNAPSHOT,
+ 0, NULL, 2000);
+}
+
+/* Set a parameter on a stream by handle via MM_STREAM_EVT_SET_PARM.
+ * Returns the stream FSM rc, or -1 if the handle matches no stream. */
+int32_t mm_channel_set_stream_parm(mm_channel_t *my_obj,
+ uint32_t s_id,
+ void *value)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, s_id);
+ if (NULL != s_obj) {
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_SET_PARM,
+ value,
+ NULL);
+ }
+
+ return rc;
+}
+
+/* Read a parameter from a stream by handle via MM_STREAM_EVT_GET_PARM;
+ * the result is written through `value` (the FSM out parameter).
+ * Returns the stream FSM rc, or -1 if the handle matches no stream. */
+int32_t mm_channel_get_stream_parm(mm_channel_t *my_obj,
+ uint32_t s_id,
+ void *value)
+{
+ int32_t rc = -1;
+ mm_stream_t* s_obj = mm_channel_util_get_stream_by_handler(my_obj, s_id);
+ if (NULL != s_obj) {
+ rc = mm_stream_fsm_fn(s_obj,
+ MM_STREAM_EVT_GET_PARM,
+ NULL,
+ value);
+ }
+
+ return rc;
+}
+
+/* Return 1 if any buffer in the super-buf node still has its need_pp
+ * flag set (i.e. the node requires post-processing), else 0. */
+uint8_t mm_channel_need_do_pp(mm_channel_t *my_obj,
+ mm_channel_queue_node_t *super_buf)
+{
+ uint8_t need_pp = 0;
+ uint8_t i;
+
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].need_pp) {
+ need_pp = 1;
+ break;
+ }
+ }
+ return need_pp;
+}
+
+/* Start backend post-processing (currently WNR only) for every buffer in
+ * the super-buf node flagged need_pp. For each such buffer a heap cookie
+ * (camera/channel/stream handles + the node) is attached to a
+ * CAM_SOCK_MSG_TYPE_WDN_START packet; the backend echoes the cookie back
+ * in the WDN_DONE ctrl event handled by mm_channel_pp_result_notify.
+ * Returns 0 on success, non-zero on the first sendmsg failure. */
+int32_t mm_channel_do_post_processing(mm_channel_t *my_obj,
+ mm_channel_queue_node_t *super_buf)
+{
+ int32_t rc = 0;
+ cam_sock_packet_t packet;
+ uint8_t i;
+ mm_stream_t* s_obj = NULL;
+ mm_channel_pp_info_t *cookie = NULL;
+
+ /* TODO : currently we only do WNR, may need extended PP */
+ /* send cmd to backend to start pp */
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].need_pp) {
+ s_obj = mm_channel_util_get_stream_by_handler(my_obj, super_buf->super_buf[i].stream_id);
+ if (NULL != s_obj) {
+ /* Cookie is freed on send failure here; on success it is
+  * owned by the backend round-trip (freed after WDN_DONE —
+  * presumably by the event path; verify to rule out a leak). */
+ cookie = (mm_channel_pp_info_t*)malloc(sizeof(mm_channel_pp_info_t));
+ if (NULL != cookie) {
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ memset(cookie, 0, sizeof(mm_channel_pp_info_t));
+ cookie->cam_hdl = my_obj->cam_obj->my_hdl;
+ cookie->ch_hdl = my_obj->my_hdl;
+ cookie->stream_hdl = s_obj->my_hdl;
+ cookie->super_buf = super_buf;
+
+ packet.msg_type = CAM_SOCK_MSG_TYPE_WDN_START;
+ packet.payload.wdn_start.cookie = (unsigned long)cookie;
+ packet.payload.wdn_start.num_frames = 1;
+ packet.payload.wdn_start.ext_mode[0] = s_obj->ext_image_mode;
+ packet.payload.wdn_start.frame_idx[0] = super_buf->super_buf[i].buf->buf_idx;
+ rc = mm_camera_util_sendmsg(my_obj->cam_obj, &packet, sizeof(packet), 0);
+ if (0 != rc) {
+ CDBG_ERROR("%s: Send DoPP msg failed (rc=%d)", __func__, rc);
+ free(cookie);
+ break;
+ }
+ } else {
+ CDBG_ERROR("%s: No memory for mm_channel_pp_info_t", __func__);
+ break;
+ }
+ }
+ }
+ }
+
+ return rc;
+}
+
+int32_t mm_channel_cancel_post_processing(mm_channel_t *my_obj)
+{
+ int32_t rc = 0;
+ /* TODO */
+ /* need send cmd to backend to cancel all pp request */
+
+ return rc;
+}
+
+int32_t mm_channel_reg_stream_cb(mm_channel_t *my_obj,
+ mm_stream_data_cb_t *cb,
+ uint32_t ext_image_mode,
+ uint32_t sensor_idx)
+{
+ uint8_t idx;
+ mm_stream_t *dest_stream = NULL;
+ int32_t rc = -1;
+
+ /* browse all streams in channel to find the destination */
+ for (idx=0; idx < MM_CAMEAR_STRAEM_NUM_MAX; idx++) {
+ if (my_obj->streams[idx].state != MM_STREAM_STATE_NOTUSED &&
+ my_obj->streams[idx].ext_image_mode == ext_image_mode &&
+ my_obj->streams[idx].sensor_idx == sensor_idx) {
+ /* find matching stream as the destination */
+ dest_stream = &my_obj->streams[idx];
+ break;
+ }
+ }
+
+ if (NULL != dest_stream) {
+ rc = mm_stream_reg_buf_cb(dest_stream, cb);
+ }
+
+ return rc;
+}
+
+int32_t mm_channel_superbuf_queue_init(mm_channel_queue_t * queue)
+{
+ return mm_camera_queue_init(&queue->que);
+}
+
+int32_t mm_channel_superbuf_queue_deinit(mm_channel_queue_t * queue)
+{
+ return mm_camera_queue_deinit(&queue->que);
+}
+
+int8_t mm_channel_util_seq_comp_w_rollover(uint32_t v1,
+ uint32_t v2)
+{
+ int8_t ret = 0;
+
+ /* TODO: need to handle the case if v2 roll over to 0 */
+ if (v1 > v2) {
+ ret = 1;
+ } else if (v1 < v2) {
+ ret = -1;
+ }
+
+ return ret;
+}
+
+int32_t mm_channel_superbuf_comp_and_enqueue(
+ mm_channel_t* ch_obj,
+ mm_channel_queue_t * queue,
+ mm_camera_buf_info_t *buf_info)
+{
+ mm_camera_q_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+ uint8_t buf_s_idx, i;
+
+ for (buf_s_idx=0; buf_s_idx < queue->num_streams; buf_s_idx++) {
+ if (buf_info->stream_id == queue->bundled_streams[buf_s_idx]) {
+ break;
+ }
+ }
+ if (buf_s_idx == queue->num_streams) {
+ CDBG_ERROR("%s: buf from stream (%d) not bundled", __func__, buf_info->stream_id);
+ return -1;
+ }
+
+ if (mm_channel_util_seq_comp_w_rollover(buf_info->frame_idx,
+ queue->expected_frame_id) < 0) {
+ /* incoming buf is older than expected buf id, will discard it */
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ return 0;
+ }
+
+ if (MM_CAMERA_SUPER_BUF_PRIORITY_NORMAL != queue->attr.priority) {
+ /* TODO */
+ /* need to decide if we want to queue the frame based on focus or exposure
+ * if frame not to be queued, we need to qbuf it back */
+ }
+
+ /* comp */
+ pthread_mutex_lock(&queue->que.lock);
+ head = &queue->que.head.list;
+ /* get the last one in the queue which is possibly having no matching */
+ pos = head->next;
+ while (pos != head) {
+ node = member_of(pos, mm_camera_q_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if (NULL != super_buf) {
+ if (super_buf->matched) {
+ /* find a matched super buf, move to next one */
+ pos = pos->next;
+ continue;
+ } else {
+ /* have an unmatched super buf, break the loop */
+ break;
+ }
+ }
+ }
+
+ if (pos == head) {
+ /* all nodes in queue are matched, or no node in queue
+ * create a new node */
+ mm_channel_queue_node_t *new_buf = NULL;
+ mm_camera_q_node_t* new_node = NULL;
+
+ new_buf = (mm_channel_queue_node_t*)malloc(sizeof(mm_channel_queue_node_t));
+ new_node = (mm_camera_q_node_t*)malloc(sizeof(mm_camera_q_node_t));
+ if (NULL != new_buf && NULL != new_node) {
+ memset(new_buf, 0, sizeof(mm_channel_queue_node_t));
+ memset(new_node, 0, sizeof(mm_camera_q_node_t));
+ new_node->data = (void *)new_buf;
+ new_buf->num_of_bufs = queue->num_streams;
+ memcpy(&new_buf->super_buf[buf_s_idx], buf_info, sizeof(mm_camera_buf_info_t));
+
+ /* enqueue */
+ //cam_list_add_tail_node(&node->list, &queue->que.head.list);
+ cam_list_add_tail_node(&new_node->list, &queue->que.head.list);
+ queue->que.size++;
+
+ if(queue->num_streams == 1) {
+ //TODO : Check. Live snapshot will have one stream in bundle?
+ new_buf->matched = 1;
+
+ if (new_buf->matched) {
+ queue->expected_frame_id = buf_info->frame_idx + queue->attr.post_frame_skip;
+ queue->match_cnt++;
+ }
+ }
+ } else {
+ /* No memory */
+ if (NULL != new_buf) {
+ free(new_buf);
+ }
+ if (NULL != new_node) {
+ free(new_node);
+ }
+ /* qbuf the new buf since we cannot enqueue */
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ }
+ } else {
+ /* find an unmatched super buf */
+ if (super_buf->super_buf[buf_s_idx].frame_idx == 0) {
+ /* new frame from the stream_id */
+ uint8_t is_new = 1;
+ uint32_t frame_idx;
+
+ for (i=0; i < super_buf->num_of_bufs; i++) {
+ //for (i=0; i < buf_s_idx; i++) {
+ if(super_buf->super_buf[i].buf == NULL) {
+ continue;
+ }
+ frame_idx = super_buf->super_buf[i].buf->frame_idx;
+ if (frame_idx == 0) {
+ continue;
+ }
+ if (frame_idx < buf_info->frame_idx) {
+ /* existing frame is older than the new frame, qbuf it */
+ mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+ memset(&super_buf->super_buf[i], 0, sizeof(mm_camera_buf_info_t));
+ } else if (frame_idx > buf_info->frame_idx) {
+ /* new frame is older */
+ is_new = 0;
+ break;
+ }else{
+ //TODO: review again
+ break;
+ }
+ }
+ if (is_new) {
+ memcpy(&super_buf->super_buf[buf_s_idx], buf_info, sizeof(mm_camera_buf_info_t));
+
+ /* check if superbuf is all matched */
+ super_buf->matched = 1;
+ for (i=0; i < super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx == 0) {
+ super_buf->matched = 0;
+ break;
+ }
+ }
+
+ if (super_buf->matched) {
+ queue->expected_frame_id = buf_info->frame_idx + queue->attr.post_frame_skip;
+ queue->match_cnt++;
+ }
+ } else {
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ }
+ } else {
+ if (super_buf->super_buf[buf_s_idx].frame_idx < buf_info->frame_idx) {
+ /* current frames in superbuf are older than the new frame
+ * qbuf all current frames */
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (super_buf->super_buf[i].frame_idx != 0) {
+ mm_channel_qbuf(ch_obj, super_buf->super_buf[i].buf);
+ memset(&super_buf->super_buf[i], 0, sizeof(mm_camera_buf_info_t));
+ }
+ }
+ /* add the new frame into the superbuf */
+ memcpy(&super_buf->super_buf[buf_s_idx], buf_info, sizeof(mm_camera_buf_info_t));
+ } else {
+ /* the new frame is older, just ignore it */
+ mm_channel_qbuf(ch_obj, buf_info->buf);
+ }
+ }
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return 0;
+}
+
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue_internal(mm_channel_queue_t * queue)
+{
+ mm_camera_q_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ head = &queue->que.head.list;
+ pos = head->next;
+ if (pos != head) {
+ /* get the first node */
+ node = member_of(pos, mm_camera_q_node_t, list);
+ super_buf = (mm_channel_queue_node_t*)node->data;
+ if (NULL != super_buf) {
+ if (super_buf->matched) {
+ /* we found a matching super buf, dequeue it */
+ cam_list_del_node(&node->list);
+ queue->que.size--;
+ queue->match_cnt--;
+ free(node);
+ } else {
+ super_buf = NULL;
+ }
+ }
+ }
+
+ return super_buf;
+}
+
+mm_channel_queue_node_t* mm_channel_superbuf_dequeue(mm_channel_queue_t * queue)
+{
+ mm_camera_q_node_t* node = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+ mm_channel_queue_node_t* super_buf = NULL;
+
+ pthread_mutex_lock(&queue->que.lock);
+ super_buf = mm_channel_superbuf_dequeue_internal(queue);
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return super_buf;
+}
+
+int32_t mm_channel_superbuf_bufdone_overflow(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue)
+{
+ int32_t rc = 0, i;
+ mm_channel_queue_node_t* super_buf = NULL;
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+ /* for continuous streaming mode, no overflow is needed */
+ return 0;
+ }
+
+ /* bufdone overflowed bufs */
+ pthread_mutex_lock(&queue->que.lock);
+ while (queue->match_cnt > queue->attr.water_mark) {
+ super_buf = mm_channel_superbuf_dequeue_internal(queue);
+ if (NULL != super_buf) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ free(super_buf);
+ }
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return rc;
+}
+
+int32_t mm_channel_superbuf_skip(mm_channel_t* my_obj,
+ mm_channel_queue_t * queue)
+{
+ int32_t rc = 0, i, count;
+ mm_channel_queue_node_t* super_buf = NULL;
+ if (MM_CAMERA_SUPER_BUF_NOTIFY_CONTINUOUS == queue->attr.notify_mode) {
+ /* for continuous streaming mode, no skip is needed */
+ return 0;
+ }
+
+ /* bufdone matched bufs beyond the look_back depth */
+ pthread_mutex_lock(&queue->que.lock);
+ while (queue->match_cnt > queue->attr.look_back) {
+ super_buf = mm_channel_superbuf_dequeue_internal(queue);
+ if (NULL != super_buf) {
+ for (i=0; i<super_buf->num_of_bufs; i++) {
+ if (NULL != super_buf->super_buf[i].buf) {
+ mm_channel_qbuf(my_obj, super_buf->super_buf[i].buf);
+ }
+ }
+ free(super_buf);
+ }
+ }
+ pthread_mutex_unlock(&queue->que.lock);
+
+ return rc;
+}
+
diff --git a/camera/QCamera/stack/mm-camera-interface/src/mm_camera_data.c b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_data.c
new file mode 100644
index 0000000..7bbd49c
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_data.c
@@ -0,0 +1,227 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+#if 0
+#undef CDBG
+#undef LOG_TAG
+#define CDBG ALOGE
+#define LOG_TAG "NotifyLogs"
+#endif
+
+int32_t mm_camera_queue_init(mm_camera_queue_t* queue)
+{
+ pthread_mutex_init(&queue->lock, NULL);
+ cam_list_init(&queue->head.list);
+ queue->size = 0;
+ return 0;
+}
+
+int32_t mm_camera_queue_enq(mm_camera_queue_t* queue, void* data)
+{
+ mm_camera_q_node_t* node =
+ (mm_camera_q_node_t *)malloc(sizeof(mm_camera_q_node_t));
+ if (NULL == node) {
+ CDBG_ERROR("%s: No memory for mm_camera_q_node_t", __func__);
+ return -1;
+ }
+
+ memset(node, 0, sizeof(mm_camera_q_node_t));
+ node->data = data;
+
+ pthread_mutex_lock(&queue->lock);
+ cam_list_add_tail_node(&node->list, &queue->head.list);
+ queue->size++;
+ pthread_mutex_unlock(&queue->lock);
+
+ return 0;
+
+}
+
+void* mm_camera_queue_deq(mm_camera_queue_t* queue)
+{
+ mm_camera_q_node_t* node = NULL;
+ void* data = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+ if (pos != head) {
+ node = member_of(pos, mm_camera_q_node_t, list);
+ cam_list_del_node(&node->list);
+ queue->size--;
+ }
+ pthread_mutex_unlock(&queue->lock);
+
+ if (NULL != node) {
+ data = node->data;
+ free(node);
+ }
+
+ return data;
+}
+
+int32_t mm_camera_queue_deinit(mm_camera_queue_t* queue)
+{
+ mm_camera_queue_flush(queue);
+ pthread_mutex_destroy(&queue->lock);
+ return 0;
+}
+
+int32_t mm_camera_queue_flush(mm_camera_queue_t* queue)
+{
+ mm_camera_q_node_t* node = NULL;
+ void* data = NULL;
+ struct cam_list *head = NULL;
+ struct cam_list *pos = NULL;
+
+ pthread_mutex_lock(&queue->lock);
+ head = &queue->head.list;
+ pos = head->next;
+
+ while(pos != head) {
+ node = member_of(pos, mm_camera_q_node_t, list);
+ cam_list_del_node(&node->list);
+ queue->size--;
+
+ /* TODO later to consider ptr inside data */
+ /* for now we only assume there is no ptr inside data
+ * so we free data directly */
+ if (NULL != node->data) {
+ free(node->data);
+ }
+ free(node);
+ pos = pos->next;
+ }
+ queue->size = 0;
+ pthread_mutex_unlock(&queue->lock);
+ return 0;
+}
+
+static void *mm_camera_cmd_thread(void *data)
+{
+ int rc = 0;
+ int running = 1;
+ int ret;
+ mm_camera_cmd_thread_t *cmd_thread =
+ (mm_camera_cmd_thread_t *)data;
+ mm_camera_cmdcb_t* node = NULL;
+
+ do {
+ do {
+ ret = sem_wait(&cmd_thread->cmd_sem);
+ if (ret != 0 && errno != EINVAL) {
+ CDBG_ERROR("%s: sem_wait error (%s)",
+ __func__, strerror(errno));
+ return NULL;
+ }
+ } while (ret != 0);
+
+ /* we got notified about new cmd avail in cmd queue */
+ node = (mm_camera_cmdcb_t*)mm_camera_queue_deq(&cmd_thread->cmd_queue);
+ if (node != NULL) {
+ switch (node->cmd_type) {
+ case MM_CAMERA_CMD_TYPE_EVT_CB:
+ case MM_CAMERA_CMD_TYPE_DATA_CB:
+ case MM_CAMERA_CMD_TYPE_ASYNC_CB:
+ case MM_CAMERA_CMD_TYPE_REQ_DATA_CB:
+ if (NULL != cmd_thread->cb) {
+ cmd_thread->cb(node, cmd_thread->user_data);
+ }
+ break;
+ case MM_CAMERA_CMD_TYPE_EXIT:
+ default:
+ running = 0;
+ break;
+ }
+ free(node);
+ }
+ } while (running);
+ return NULL;
+}
+
+int32_t mm_camera_cmd_thread_launch(mm_camera_cmd_thread_t * cmd_thread,
+ mm_camera_cmd_cb_t cb,
+ void* user_data)
+{
+ int32_t rc = 0;
+
+ sem_init(&cmd_thread->cmd_sem, 0, 0);
+ mm_camera_queue_init(&cmd_thread->cmd_queue);
+ cmd_thread->cb = cb;
+ cmd_thread->user_data = user_data;
+
+ /* launch the thread */
+ pthread_create(&cmd_thread->cmd_pid,
+ NULL,
+ mm_camera_cmd_thread,
+ (void *)cmd_thread);
+ return rc;
+}
+
+int32_t mm_camera_cmd_thread_release(mm_camera_cmd_thread_t * cmd_thread)
+{
+ int32_t rc = 0;
+ mm_camera_buf_info_t buf_info;
+ mm_camera_cmdcb_t* node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL == node) {
+ CDBG_ERROR("%s: No memory for mm_camera_cmdcb_t", __func__);
+ return -1;
+ }
+
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_EXIT;
+
+ mm_camera_queue_enq(&cmd_thread->cmd_queue, node);
+ sem_post(&cmd_thread->cmd_sem);
+
+ /* wait until cmd thread exits */
+ if (pthread_join(cmd_thread->cmd_pid, NULL) != 0) {
+ CDBG("%s: pthread dead already\n", __func__);
+ }
+ mm_camera_queue_deinit(&cmd_thread->cmd_queue);
+
+ sem_destroy(&cmd_thread->cmd_sem);
+ memset(cmd_thread, 0, sizeof(mm_camera_cmd_thread_t));
+ return rc;
+}
+
diff --git a/camera/QCamera/stack/mm-camera-interface/src/mm_camera_helper.c b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_helper.c
new file mode 100644
index 0000000..5289084
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_helper.c
@@ -0,0 +1,318 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <sys/mman.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <errno.h>
+#include <string.h>
+#include "mm_camera_dbg.h"
+#include <time.h>
+#include "mm_camera_interface.h"
+#include <linux/ion.h>
+
+#define MM_CAMERA_PROFILE 1
+
+struct file;
+struct inode;
+struct vm_area_struct;
+
+/*===========================================================================
+ * FUNCTION - do_mmap -
+ *
+ * DESCRIPTION: returned virtual address
+ *==========================================================================*/
+uint8_t *mm_camera_do_mmap(uint32_t size, int *pmemFd)
+{
+ void *ret; /* returned virtual address */
+ int pmem_fd = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+
+ if (pmem_fd <= 0) {
+ CDBG("do_mmap: Open device /dev/pmem_adsp failed!\n");
+ return NULL;
+ }
+ /* to make it page size aligned */
+ size = (size + 4095) & (~4095);
+ ret = mmap(NULL,
+ size,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ pmem_fd,
+ 0);
+ if (ret == MAP_FAILED) {
+ CDBG("do_mmap: pmem mmap() failed: %s (%d)\n", strerror(errno), errno);
+ close(pmem_fd);
+ return NULL;
+ }
+ CDBG("do_mmap: pmem mmap fd %d ptr %p len %u\n", pmem_fd, ret, size);
+ *pmemFd = pmem_fd;
+ return(uint8_t *)ret;
+}
+
+/*===========================================================================
+ * FUNCTION - do_munmap -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+int mm_camera_do_munmap(int pmem_fd, void *addr, size_t size)
+{
+ int rc;
+
+ if (pmem_fd <= 0) {
+ CDBG("%s:invalid fd=%d\n", __func__, pmem_fd);
+ return -1;
+ }
+ size = (size + 4095) & (~4095);
+ CDBG("munmapped size = %d, virt_addr = 0x%p\n",
+ size, addr);
+ rc = (munmap(addr, size));
+ close(pmem_fd);
+ CDBG("do_mmap: pmem munmap fd %d ptr %p len %u rc %d\n", pmem_fd, addr,
+ size, rc);
+ return rc;
+}
+
+#ifdef USE_ION
+uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+ struct ion_fd_data *ion_info_fd, int *mapFd)
+{
+ void *ret; /* returned virtual address */
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ /* to make it page size aligned */
+ alloc->len = (alloc->len + 4095) & (~4095);
+
+ rc = ioctl(ion_fd, ION_IOC_ALLOC, alloc);
+ if (rc < 0) {
+ CDBG_ERROR("ION allocation failed\n");
+ goto ION_ALLOC_FAILED;
+ }
+
+ ion_info_fd->handle = alloc->handle;
+ rc = ioctl(ion_fd, ION_IOC_SHARE, ion_info_fd);
+ if (rc < 0) {
+ CDBG_ERROR("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ *mapFd = ion_info_fd->fd;
+ /*ret = mmap(NULL,
+ alloc->len,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ *mapFd,
+ 0);
+
+ if (ret == MAP_FAILED) {
+ CDBG_ERROR("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+ goto ION_MAP_FAILED;
+ }*/
+
+ return ret;
+
+ION_MAP_FAILED:
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ return NULL;
+}
+
+int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+ void *addr, size_t size)
+{
+ int rc = 0;
+ rc = munmap(addr, size);
+ close(ion_info_fd->fd);
+
+ struct ion_handle_data handle_data;
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ return rc;
+}
+#endif
+
+/*============================================================
+ FUNCTION mm_camera_dump_image
+ DESCRIPTION:
+==============================================================*/
+int mm_camera_dump_image(void *addr, uint32_t size, char *filename)
+{
+ int file_fd = open(filename, O_RDWR | O_CREAT, 0777);
+
+ if (file_fd < 0) {
+ CDBG_HIGH("%s: cannot open file\n", __func__);
+ return -1;
+ } else
+ write(file_fd, addr, size);
+ close(file_fd);
+ CDBG("%s: %s, size=%d\n", __func__, filename, size);
+ return 0;
+}
+
+uint32_t mm_camera_get_msm_frame_len(cam_format_t fmt_type,
+ camera_mode_t mode,
+ int width,
+ int height,
+ int image_type,
+ uint8_t *num_planes,
+ uint32_t plane[])
+{
+ uint32_t size;
+ *num_planes = 0;
+ int local_height;
+
+ switch (fmt_type) {
+ case CAMERA_YUV_420_NV12:
+ case CAMERA_YUV_420_NV21:
+ *num_planes = 2;
+ if(CAMERA_MODE_3D == mode) {
+ size = (uint32_t)(PAD_TO_2K(width*height)*3/2);
+ plane[0] = PAD_TO_WORD(width*height);
+ } else {
+ if (image_type == OUTPUT_TYPE_V) {
+ plane[0] = PAD_TO_2K(width * height);
+ plane[1] = PAD_TO_2K(width * height/2);
+ } else if (image_type == OUTPUT_TYPE_P) {
+ plane[0] = PAD_TO_WORD(width * height);
+ plane[1] = PAD_TO_WORD(width * height/2);
+ } else {
+ plane[0] = PAD_TO_WORD(width * CEILING16(height));
+ plane[1] = PAD_TO_WORD(width * CEILING16(height)/2);
+ }
+ size = plane[0] + plane[1];
+ }
+ break;
+ case CAMERA_BAYER_SBGGR10:
+ *num_planes = 1;
+ plane[0] = PAD_TO_WORD(width * height);
+ size = plane[0];
+ break;
+ case CAMERA_YUV_422_NV16:
+ case CAMERA_YUV_422_NV61:
+ if( image_type == OUTPUT_TYPE_S || image_type == OUTPUT_TYPE_V) {
+ local_height = CEILING16(height);
+ } else {
+ local_height = height;
+ }
+ *num_planes = 2;
+ plane[0] = PAD_TO_WORD(width * height);
+ plane[1] = PAD_TO_WORD(width * height);
+ size = plane[0] + plane[1];
+ break;
+ default:
+ CDBG("%s: format %d not supported.\n",
+ __func__, fmt_type);
+ size = 0;
+ }
+ CDBG("%s:fmt=%d,image_type=%d,width=%d,height=%d,frame_len=%d\n",
+ __func__, fmt_type, image_type, width, height, size);
+ return size;
+}
+
+void mm_camera_util_profile(const char *str)
+{
+#if (MM_CAMERA_PROFILE)
+ struct timespec cur_time;
+
+ clock_gettime(CLOCK_REALTIME, &cur_time);
+ CDBG_HIGH("PROFILE %s: %ld.%09ld\n", str,
+ cur_time.tv_sec, cur_time.tv_nsec);
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_do_mmap_ion -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+ struct ion_fd_data *ion_info_fd, int *mapFd)
+{
+ void *ret; /* returned virtual address */
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ /* to make it page size aligned */
+ alloc->len = (alloc->len + 4095) & (~4095);
+ alloc->align = 4096;
+
+ rc = ioctl(ion_fd, ION_IOC_ALLOC, alloc);
+ if (rc < 0) {
+ CDBG_ERROR("ION allocation failed %s\n", strerror(errno));
+ goto ION_ALLOC_FAILED;
+ }
+
+ ion_info_fd->handle = alloc->handle;
+ rc = ioctl(ion_fd, ION_IOC_SHARE, ion_info_fd);
+ if (rc < 0) {
+ CDBG_ERROR("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ *mapFd = ion_info_fd->fd;
+ ret = mmap(NULL,
+ alloc->len,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ *mapFd,
+ 0);
+
+ if (ret == MAP_FAILED) {
+ CDBG_ERROR("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+ goto ION_MAP_FAILED;
+ }
+
+ return ret;
+
+ION_MAP_FAILED:
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_do_munmap_ion -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+ void *addr, size_t size)
+{
+ int rc = 0;
+ rc = munmap(addr, size);
+ close(ion_info_fd->fd);
+
+ struct ion_handle_data handle_data;
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ return rc;
+}
diff --git a/camera/QCamera/stack/mm-camera-interface/src/mm_camera_interface.c b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_interface.c
new file mode 100644
index 0000000..345e050
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_interface.c
@@ -0,0 +1,951 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <linux/media.h>
+#include <semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_camera_ctrl_t g_cam_ctrl = {{{0, {0, 0, 0, 0}, 0, 0}}, 0, {{0}}, {0}};
+
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+
+/* utility function to generate handler */
+uint32_t mm_camera_util_generate_handler(uint8_t index)
+{
+ uint32_t handler = 0;
+ pthread_mutex_lock(&g_handler_lock);
+ g_handler_history_count++;
+ if (0 == g_handler_history_count) {
+ g_handler_history_count++;
+ }
+ handler = g_handler_history_count;
+ handler = (handler<<8) | index;
+ pthread_mutex_unlock(&g_handler_lock);
+ return handler;
+}
+
+uint8_t mm_camera_util_get_index_by_handler(uint32_t handler)
+{
+ return (handler&0x000000ff);
+}
+
+const char *mm_camera_util_get_dev_name(uint32_t cam_handler)
+{
+ char *dev_name = NULL;
+ uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handler);
+ dev_name = g_cam_ctrl.camera[cam_idx].video_dev_name;
+ return dev_name;
+}
+
+mm_camera_obj_t* mm_camera_util_get_camera_by_handler(uint32_t cam_handler)
+{
+ mm_camera_obj_t *cam_obj = NULL;
+ uint8_t cam_idx = mm_camera_util_get_index_by_handler(cam_handler);
+
+ if ((NULL != g_cam_ctrl.cam_obj[cam_idx]) &&
+ (cam_handler == g_cam_ctrl.cam_obj[cam_idx]->my_hdl)) {
+ cam_obj = g_cam_ctrl.cam_obj[cam_idx];
+ }
+ return cam_obj;
+}
+
+static int32_t mm_camera_intf_sync(uint32_t camera_handler)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s E: camera_handler = %d ",__func__,camera_handler);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_sync(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* check if the parm is supported */
+static int32_t mm_camera_intf_is_parm_supported(uint32_t camera_handler,
+ mm_camera_parm_type_t parm_type,
+ uint8_t *support_set_parm,
+ uint8_t *support_get_parm)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+ *support_set_parm = 0;
+ *support_get_parm = 0;
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_is_parm_supported(my_obj, parm_type, support_set_parm, support_get_parm);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/* set a parms current value */
+static int32_t mm_camera_intf_set_parm(uint32_t camera_handler,
+ mm_camera_parm_type_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_set_parm(my_obj, parm_type, p_value);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+/* get a parms current value */
+static int32_t mm_camera_intf_get_parm(uint32_t camera_handler,
+ mm_camera_parm_type_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_parm(my_obj, parm_type, p_value);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ return rc;
+}
+
+static void mm_camera_intf_close(uint32_t camera_handler)
+{
+ int32_t rc = -1;
+ uint8_t cam_idx = camera_handler & 0x00ff;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s E: camera_handler = %d ",__func__,camera_handler);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if (my_obj){
+ my_obj->ref_count--;
+
+ if(my_obj->ref_count > 0) {
+ /* still have reference to obj, return here */
+ CDBG("%s: ref_count=%d\n", __func__, my_obj->ref_count);
+ pthread_mutex_unlock(&g_intf_lock);
+ return;
+ }
+
+ /* need close camera here as no other reference
+ * first empty g_cam_ctrl's referent to cam_obj */
+ g_cam_ctrl.cam_obj[cam_idx] = NULL;
+
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+
+ mm_camera_close(my_obj);
+
+ pthread_mutex_destroy(&my_obj->cam_lock);
+ free(my_obj);
+
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+}
+
+static uint32_t mm_camera_intf_add_channel(uint32_t camera_handler)
+{
+ uint32_t ch_id = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E camera_handler = %d",__func__,camera_handler);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ ch_id = mm_camera_add_channel(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X ch_id = %d",__func__,ch_id);
+ return ch_id;
+}
+
+static void mm_camera_intf_del_channel(uint32_t camera_handler, uint32_t ch_id)
+{
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E ch_id = %d",__func__,ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ mm_camera_del_channel(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X",__func__);
+}
+
+static uint8_t mm_camera_intf_is_event_supported(uint32_t camera_handler,
+ mm_camera_event_type_t evt_type)
+{
+ switch(evt_type) {
+ case MM_CAMERA_EVT_TYPE_CH:
+ case MM_CAMERA_EVT_TYPE_CTRL:
+ case MM_CAMERA_EVT_TYPE_STATS:
+ case MM_CAMERA_EVT_TYPE_INFO:
+ return 1;
+ default:
+ return 0;
+ }
+ return 0;
+}
+
+static int32_t mm_camera_intf_register_event_notify(
+ uint32_t camera_handler,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data,
+ mm_camera_event_type_t evt_type)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E evt_type = %d",__func__,evt_type);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_register_event_notify(my_obj, evt_cb, user_data, evt_type);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :E rc = %d",__func__,rc);
+ return rc;
+}
+
+static mm_camera_2nd_sensor_t * mm_camera_intf_query_2nd_sensor_info(uint32_t camera_handler)
+{
+ mm_camera_2nd_sensor_t *sensor_info = NULL;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E camera_handler = %d",__func__,camera_handler);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ sensor_info = mm_camera_query_2nd_sensor_info(my_obj);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X",__func__);
+ return sensor_info;
+}
+
+static int32_t mm_camera_intf_qbuf(uint32_t camera_handler,
+ uint32_t ch_id,
+ mm_camera_buf_def_t *buf)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_qbuf(my_obj, ch_id, buf);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X evt_type = %d",__func__,rc);
+ return rc;
+}
+
+static uint32_t mm_camera_intf_add_stream(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ mm_camera_buf_notify_t buf_cb, void *user_data,
+ uint32_t ext_image_mode, uint32_t sensor_idx)
+{
+ uint32_t stream_id = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s : E handle = %d ch_id = %d",__func__,camera_handler,
+ ch_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ stream_id = mm_camera_add_stream(my_obj, ch_id, buf_cb,
+ user_data, ext_image_mode, sensor_idx);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X stream_id = %d",__func__,stream_id);
+ return stream_id;
+}
+
+static int32_t mm_camera_intf_del_stream(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ uint32_t stream_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s : E handle = %d ch_id = %d stream_id = %d",__func__,camera_handler,
+ ch_id,stream_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_del_stream(my_obj, ch_id, stream_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_config_stream(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ uint32_t stream_id,
+ mm_camera_stream_config_t *config)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E handle = %d, ch_id = %d,stream_id = %d",__func__,
+ camera_handler,ch_id,stream_id);
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ CDBG("%s :mm_camera_intf_config_stream stream_id = %d",__func__,stream_id);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_config_stream(my_obj, ch_id, stream_id, config);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_bundle_streams(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ mm_camera_buf_notify_t super_frame_notify_cb,
+ void *user_data,
+ mm_camera_bundle_attr_t *attr,
+ uint8_t num_streams,
+ uint32_t *stream_ids)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E handle = %d, ch_id = %d",__func__,
+ camera_handler,ch_id);
+
+ if (MM_CAMEAR_MAX_STRAEM_BUNDLE < num_streams) {
+ CDBG_ERROR("%s: number of streams (%d) exceeds max (%d)",
+ __func__, num_streams, MM_CAMEAR_MAX_STRAEM_BUNDLE);
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_bundle_streams(my_obj, ch_id,
+ super_frame_notify_cb,
+ user_data, attr,
+ num_streams, stream_ids);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_destroy_bundle(uint32_t camera_handler,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E handle = %d, ch_id = %d",__func__,
+ camera_handler,ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_destroy_bundle(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_start_streams(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ uint8_t num_streams,
+ uint32_t *stream_ids)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E camera_handler = %d,ch_id = %d, num_streams = %d",
+ __func__,camera_handler,ch_id,num_streams);
+ if (MM_CAMEAR_STRAEM_NUM_MAX < num_streams) {
+ CDBG_ERROR("%s: num of streams (%d) exceeds MAX (%d)",
+ __func__, num_streams, MM_CAMEAR_STRAEM_NUM_MAX);
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_start_streams(my_obj, ch_id, num_streams, stream_ids);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_stop_streams(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ uint8_t num_streams,
+ uint32_t *stream_ids)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E camera_handler = %d,ch_id = %d, num_streams = %d",
+ __func__,camera_handler,ch_id,num_streams);
+
+ if (MM_CAMEAR_STRAEM_NUM_MAX < num_streams) {
+ CDBG_ERROR("%s: num of streams (%d) exceeds MAX (%d)",
+ __func__, num_streams, MM_CAMEAR_STRAEM_NUM_MAX);
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_stop_streams(my_obj, ch_id,
+ num_streams, stream_ids);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_async_teardown_streams(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ uint8_t num_streams,
+ uint32_t *stream_ids)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E camera_handler = %d,ch_id = %d, num_streams = %d",
+ __func__,camera_handler,ch_id,num_streams);
+
+ if (MM_CAMEAR_STRAEM_NUM_MAX < num_streams) {
+ CDBG_ERROR("%s: num of streams (%d) exceeds MAX (%d)",
+ __func__, num_streams, MM_CAMEAR_STRAEM_NUM_MAX);
+ return rc;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_async_teardown_streams(my_obj, ch_id,
+ num_streams, stream_ids);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_request_super_buf(
+ uint32_t camera_handler,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ CDBG("%s :E camera_handler = %d,ch_id = %d",
+ __func__,camera_handler,ch_id);
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_request_super_buf(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_cancel_super_buf_request(
+ uint32_t camera_handler,
+ uint32_t ch_id)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E camera_handler = %d,ch_id = %d",
+ __func__,camera_handler,ch_id);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_cancel_super_buf_request(my_obj, ch_id);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_start_focus(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ uint32_t sensor_idx,
+ uint32_t focus_mode)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ CDBG("%s :E camera_handler = %d,ch_id = %d, focus_mode = %d",
+ __func__,camera_handler,ch_id,focus_mode);
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_start_focus(my_obj, ch_id, sensor_idx, focus_mode);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_abort_focus(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ uint32_t sensor_idx)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_abort_focus(my_obj, ch_id, sensor_idx);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_prepare_snapshot(
+ uint32_t camera_handler,
+ uint32_t ch_id,
+ uint32_t sensor_idx)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handler);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_prepare_snapshot(my_obj, ch_id, sensor_idx);
+ } else {
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_set_stream_parm(
+ uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t s_id,
+ mm_camera_stream_parm_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ CDBG("%s :E camera_handle = %d,ch_id = %d,s_id = %d,parm_type = %d",__func__,
+ camera_handle,ch_id,s_id,parm_type);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_set_stream_parm(my_obj,ch_id,s_id,
+ parm_type,
+ p_value);
+ }else{
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+static int32_t mm_camera_intf_get_stream_parm(
+ uint32_t camera_handle,
+ uint32_t ch_id,
+ uint32_t s_id,
+ mm_camera_stream_parm_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -1;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ my_obj = mm_camera_util_get_camera_by_handler(camera_handle);
+
+ CDBG("%s :E camera_handle = %d,ch_id = %d,s_id = %d,parm_type = %d",__func__,
+ camera_handle,ch_id,s_id,parm_type);
+
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->cam_lock);
+ pthread_mutex_unlock(&g_intf_lock);
+ rc = mm_camera_get_stream_parm(my_obj,ch_id,s_id,
+ parm_type,
+ p_value);
+ }else{
+ pthread_mutex_unlock(&g_intf_lock);
+ }
+
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+mm_camera_info_t * camera_query(uint8_t *num_cameras)
+{
+ int i = 0, rc = 0;
+ int dev_fd = 0;
+ struct media_device_info mdev_info;
+ int num_media_devices = 0;
+ if (!num_cameras) {
+ CDBG_ERROR("%s: num_cameras is NULL\n", __func__);
+ return NULL;
+ }
+
+ CDBG("%s : E",__func__);
+ /* lock the mutex */
+ pthread_mutex_lock(&g_intf_lock);
+ *num_cameras = 0;
+ while (1) {
+ char dev_name[32];
+ snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+ dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (dev_fd < 0) {
+ CDBG("Done discovering media devices\n");
+ break;
+ }
+ num_media_devices++;
+ rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+ if (rc < 0) {
+ CDBG_ERROR("Error: ioctl media_dev failed: %s\n", strerror(errno));
+ close(dev_fd);
+ break;
+ }
+
+ if(strncmp(mdev_info.model, QCAMERA_NAME, sizeof(mdev_info.model)) != 0) { /* BUGFIX: ')' was misplaced so strncmp compared only 1 byte */
+ close(dev_fd);
+ continue;
+ }
+
+ char * mdev_cfg;
+ int cam_type = 0, mount_angle = 0, info_index = 0;
+ mdev_cfg = strtok(mdev_info.serial, "-");
+ while(mdev_cfg != NULL) {
+ if(info_index == 0) {
+ if(strcmp(mdev_cfg, QCAMERA_NAME))
+ break;
+ } else if(info_index == 1) {
+ mount_angle = atoi(mdev_cfg);
+ } else if(info_index == 2) {
+ cam_type = atoi(mdev_cfg);
+ }
+ mdev_cfg = strtok(NULL, "-");
+ info_index++;
+ }
+
+ if(info_index == 0) {
+ close(dev_fd);
+ continue;
+ }
+
+ int num_entities = 1;
+ while (1) {
+ struct media_entity_desc entity;
+ memset(&entity, 0, sizeof(entity));
+ entity.id = num_entities++;
+ rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+ if (rc < 0) {
+ CDBG("Done enumerating media entities\n");
+ rc = 0;
+ break;
+ }
+ if(entity.type == MEDIA_ENT_T_DEVNODE_V4L && entity.group_id == QCAMERA_VNODE_GROUP_ID) {
+ strncpy(g_cam_ctrl.video_dev_name[*num_cameras],
+ entity.name, sizeof(entity.name));
+ g_cam_ctrl.camera[*num_cameras].video_dev_name =
+ &g_cam_ctrl.video_dev_name[*num_cameras][0];
+ break;
+ }
+ }
+
+ //g_cam_ctrl.camera[*num_cameras].camera_info.camera_id = *num_cameras;
+ g_cam_ctrl.camera[*num_cameras].camera_id = *num_cameras;
+
+ g_cam_ctrl.camera[*num_cameras].
+ camera_info.modes_supported = CAMERA_MODE_2D;
+
+ if(cam_type > 1) {
+ g_cam_ctrl.camera[*num_cameras].
+ camera_info.modes_supported |= CAMERA_MODE_3D;
+ }
+
+ g_cam_ctrl.camera[*num_cameras].camera_info.position =
+ (cam_type == 1) ? FRONT_CAMERA : BACK_CAMERA;
+
+ g_cam_ctrl.camera[*num_cameras].camera_info.sensor_mount_angle =
+ mount_angle;
+
+ g_cam_ctrl.camera[*num_cameras].main_sensor_type = 0;
+
+ CDBG("%s: dev_info[id=%d,name='%s',pos=%d,modes=0x%x,sensor=%d mount_angle = %d]\n",
+ __func__, *num_cameras,
+ g_cam_ctrl.camera[*num_cameras].video_dev_name,
+ g_cam_ctrl.camera[*num_cameras].camera_info.position,
+ g_cam_ctrl.camera[*num_cameras].camera_info.modes_supported,
+ g_cam_ctrl.camera[*num_cameras].main_sensor_type,
+ g_cam_ctrl.camera[*num_cameras].camera_info.sensor_mount_angle);
+
+ *num_cameras += 1;
+ if (dev_fd > 0) {
+ close(dev_fd);
+ }
+ }
+ /* (removed self-assignment no-op: *num_cameras already holds the count) */
+ g_cam_ctrl.num_cam = *num_cameras;
+
+ /* unlock the mutex */
+ pthread_mutex_unlock(&g_intf_lock);
+ CDBG("%s: num_cameras=%d\n", __func__, g_cam_ctrl.num_cam);
+ if(rc == 0)
+ return &g_cam_ctrl.camera[0];
+ else
+ return NULL;
+}
+
+/* camera ops v-table */
+static mm_camera_ops_t mm_camera_ops = {
+ .sync = mm_camera_intf_sync,
+ .is_event_supported = mm_camera_intf_is_event_supported,
+ .register_event_notify = mm_camera_intf_register_event_notify,
+ .qbuf = mm_camera_intf_qbuf,
+ .camera_close = mm_camera_intf_close,
+ .query_2nd_sensor_info = mm_camera_intf_query_2nd_sensor_info,
+ .is_parm_supported = mm_camera_intf_is_parm_supported,
+ .set_parm = mm_camera_intf_set_parm,
+ .get_parm = mm_camera_intf_get_parm,
+ .ch_acquire = mm_camera_intf_add_channel,
+ .ch_release = mm_camera_intf_del_channel,
+ .add_stream = mm_camera_intf_add_stream,
+ .del_stream = mm_camera_intf_del_stream,
+ .config_stream = mm_camera_intf_config_stream,
+ .init_stream_bundle = mm_camera_intf_bundle_streams,
+ .destroy_stream_bundle = mm_camera_intf_destroy_bundle,
+ .start_streams = mm_camera_intf_start_streams,
+ .stop_streams = mm_camera_intf_stop_streams,
+ .async_teardown_streams = mm_camera_intf_async_teardown_streams,
+ .request_super_buf = mm_camera_intf_request_super_buf,
+ .cancel_super_buf_request = mm_camera_intf_cancel_super_buf_request,
+ .start_focus = mm_camera_intf_start_focus,
+ .abort_focus = mm_camera_intf_abort_focus,
+ .prepare_snapshot = mm_camera_intf_prepare_snapshot,
+ .set_stream_parm = mm_camera_intf_set_stream_parm,
+ .get_stream_parm = mm_camera_intf_get_stream_parm
+};
+
+/* open camera. */
+mm_camera_vtbl_t * camera_open(uint8_t camera_idx,
+ mm_camear_mem_vtbl_t *mem_vtbl)
+{
+ int32_t rc = 0;
+ mm_camera_obj_t* cam_obj = NULL;
+
+ CDBG("%s: E camera_idx = %d\n", __func__,camera_idx);
+ if (MSM_MAX_CAMERA_SENSORS <= camera_idx) {
+ CDBG_ERROR("%s: Invalid camera_idx (%d)", __func__, camera_idx);
+ return NULL;
+ }
+
+ pthread_mutex_lock(&g_intf_lock);
+ /* opened already */
+ if(NULL != g_cam_ctrl.cam_obj[camera_idx]) {
+ /* Add reference */
+ g_cam_ctrl.cam_obj[camera_idx]->ref_count++;
+ pthread_mutex_unlock(&g_intf_lock);
+ CDBG("%s: opened already\n", __func__);
+ return &g_cam_ctrl.cam_obj[camera_idx]->vtbl; /* BUGFIX: was &cam_obj->vtbl, but cam_obj is still NULL here */
+ }
+
+ cam_obj = (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
+ if(NULL == cam_obj) {
+ pthread_mutex_unlock(&g_intf_lock);
+ CDBG("%s: no mem", __func__);
+ return NULL;
+ }
+
+ /* initialize camera obj */
+ memset(cam_obj, 0, sizeof(mm_camera_obj_t));
+ cam_obj->ctrl_fd = -1;
+ cam_obj->ds_fd = -1;
+ cam_obj->ref_count++;
+ cam_obj->my_hdl = mm_camera_util_generate_handler(camera_idx);
+ cam_obj->vtbl.camera_handle = cam_obj->my_hdl; /* set handler */
+ cam_obj->vtbl.camera_info = &g_cam_ctrl.camera[camera_idx];
+ cam_obj->vtbl.ops = &mm_camera_ops;
+ cam_obj->mem_vtbl = mem_vtbl; /* save mem_vtbl */
+ pthread_mutex_init(&cam_obj->cam_lock, NULL);
+
+ rc = mm_camera_open(cam_obj);
+ if(rc != 0) {
+ CDBG_ERROR("%s: mm_camera_open err = %d", __func__, rc);
+ pthread_mutex_destroy(&cam_obj->cam_lock);
+ g_cam_ctrl.cam_obj[camera_idx] = NULL;
+ free(cam_obj);
+ cam_obj = NULL;
+ pthread_mutex_unlock(&g_intf_lock);
+ return NULL;
+ }else{
+ CDBG("%s: Open succeded\n", __func__);
+ g_cam_ctrl.cam_obj[camera_idx] = cam_obj;
+ pthread_mutex_unlock(&g_intf_lock);
+ return &cam_obj->vtbl;
+ }
+}
diff --git a/camera/QCamera/stack/mm-camera-interface/src/mm_camera_sock.c b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_sock.c
new file mode 100644
index 0000000..be3ab02
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_sock.c
@@ -0,0 +1,224 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include <sys/socket.h>
+#include <sys/uio.h>
+#include <linux/un.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+
+/*===========================================================================
+ * FUNCTION - mm_camera_socket_create -
+ *
+ * DESCRIPTION: opens a domain socket tied to camera ID and socket type
+ * int cam_id: camera ID
+ * mm_camera_sock_type_t sock_type: socket type, TCP/UDP
+ * returns the fd of the domain socket, or -1 on failure
+ *==========================================================================*/
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type)
+{
+ int socket_fd;
+ struct sockaddr_un sock_addr;
+ int sktype;
+ int rc;
+
+ switch (sock_type)
+ {
+ case MM_CAMERA_SOCK_TYPE_UDP:
+ sktype = SOCK_DGRAM;
+ break;
+ case MM_CAMERA_SOCK_TYPE_TCP:
+ sktype = SOCK_STREAM;
+ break;
+ default:
+ CDBG_ERROR("%s: unknown socket type =%d", __func__, sock_type);
+ return -1;
+ }
+ socket_fd = socket(AF_UNIX, sktype, 0);
+ if (socket_fd < 0) {
+ CDBG_ERROR("%s: error create socket fd =%d", __func__, socket_fd);
+ return socket_fd;
+ }
+
+ memset(&sock_addr, 0, sizeof(sock_addr));
+ sock_addr.sun_family = AF_UNIX;
+ snprintf(sock_addr.sun_path, UNIX_PATH_MAX, "/data/cam_socket%d", cam_id);
+ if((rc = connect(socket_fd, (struct sockaddr *) &sock_addr,
+ sizeof(sock_addr))) != 0) {
+ CDBG_ERROR("%s: socket_fd=%d %s ", __func__, socket_fd, strerror(errno)); /* BUGFIX: log before close() clobbers fd and errno */
+ close(socket_fd);
+ socket_fd = -1;
+ }
+
+ CDBG("%s: socket_fd=%d %s", __func__, socket_fd, sock_addr.sun_path);
+ return socket_fd;
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_socket_close -
+ *
+ * DESCRIPTION: close domain socket by its fd
+ *==========================================================================*/
+void mm_camera_socket_close(int fd)
+{
+ if (fd > 0) {
+ close(fd);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_socket_sendmsg -
+ *
+ * DESCRIPTION: send msg through domain socket
+ * int fd: socket fd
+ * mm_camera_sock_msg_packet_t *msg: pointer to msg to be sent over domain socket
+ * int sendfd: file descriptors to be sent
+ * return the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_sendmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int sendfd)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr * cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int))]; /* space for exactly one fd as ancillary data */
+
+ if (msg == NULL) {
+ CDBG("%s: msg is NULL", __func__);
+ return -1;
+ }
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+ CDBG("%s: iov_len=%d", __func__, iov[0].iov_len);
+
+ msgh.msg_control = NULL;
+ msgh.msg_controllen = 0;
+
+ /* if sendfd is valid, pass it through an SCM_RIGHTS control msg */
+ if( sendfd > 0) {
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+ cmsghp = CMSG_FIRSTHDR(&msgh);
+ if (cmsghp != NULL) {
+ CDBG("%s: Got ctrl msg pointer", __func__);
+ cmsghp->cmsg_level = SOL_SOCKET;
+ cmsghp->cmsg_type = SCM_RIGHTS;
+ cmsghp->cmsg_len = CMSG_LEN(sizeof(int));
+ *((int *)CMSG_DATA(cmsghp)) = sendfd;
+ CDBG("%s: cmsg data=%d", __func__, *((int *) CMSG_DATA(cmsghp)));
+ } else {
+ CDBG("%s: ctrl msg NULL", __func__);
+ return -1;
+ }
+ }
+
+ return sendmsg(fd, &(msgh), 0); /* returns total bytes sent, or -1 with errno set */
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_socket_recvmsg -
+ *
+ * DESCRIPTION: receive msg from domain socket.
+ * int fd: socket fd
+ * void *msg: pointer to mm_camera_sock_msg_packet_t to hold incoming msg,
+ * need be allocated by the caller
+ * uint32_t buf_size: the size of the buf that holds incoming msg
+ * int *rcvdfd: pointer to hold recvd file descriptor if not NULL.
+ * return the total bytes of received msg
+ *==========================================================================*/
+int mm_camera_socket_recvmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int *rcvdfd)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr *cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int))];
+ int rcvd_fd = -1;
+ int rcvd_len = 0;
+
+ if ( (msg == NULL) || (buf_size <= 0) ) {
+ CDBG_ERROR(" %s: msg buf is NULL", __func__);
+ return -1;
+ }
+
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+
+ if ( (rcvd_len = recvmsg(fd, &(msgh), 0)) <= 0) {
+ CDBG_ERROR(" %s: recvmsg failed", __func__);
+ return rcvd_len;
+ }
+
+ CDBG("%s: msg_ctrl %p len %d", __func__, msgh.msg_control, msgh.msg_controllen);
+
+ if( ((cmsghp = CMSG_FIRSTHDR(&msgh)) != NULL) &&
+ (cmsghp->cmsg_len == CMSG_LEN(sizeof(int))) ) {
+ if (cmsghp->cmsg_level == SOL_SOCKET &&
+ cmsghp->cmsg_type == SCM_RIGHTS) {
+ CDBG("%s: CtrlMsg is valid", __func__);
+ rcvd_fd = *((int *) CMSG_DATA(cmsghp));
+ CDBG("%s: Receieved fd=%d", __func__, rcvd_fd);
+ } else {
+ CDBG_ERROR("%s: Unexpected Control Msg. Line=%d", __func__, __LINE__);
+ }
+ }
+
+ if (rcvdfd) {
+ *rcvdfd = rcvd_fd;
+ }
+
+ return rcvd_len;
+}
+
diff --git a/camera/QCamera/stack/mm-camera-interface/src/mm_camera_stream.c b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_stream.c
new file mode 100644
index 0000000..6698e61
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_stream.c
@@ -0,0 +1,1563 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <time.h>
+#include <semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+/* Upper bound on buffers that may be registered with the kernel per stream */
+#define MM_CAMERA_MAX_NUM_FRAMES 16
+
+/* internal function declarations */
+int32_t mm_stream_qbuf(mm_stream_t *my_obj,
+ mm_camera_buf_def_t *buf);
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj);
+int32_t mm_stream_set_fmt(mm_stream_t * my_obj);
+int32_t mm_stream_get_offset(mm_stream_t *my_obj);
+int32_t mm_stream_set_cid(mm_stream_t *my_obj,stream_cid_t *in_value);
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj);
+int32_t mm_stream_request_buf(mm_stream_t * my_obj);
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_release(mm_stream_t *my_obj);
+int32_t mm_stream_get_crop(mm_stream_t *my_obj,
+ mm_camera_rect_t *crop);
+int32_t mm_stream_get_cid(mm_stream_t *my_obj,
+ stream_cid_t *out_value);
+int32_t mm_stream_set_parm_acquire(mm_stream_t *my_obj,
+ void *value);
+int32_t mm_stream_get_parm_acquire(mm_stream_t *my_obj,
+ void *value);
+int32_t mm_stream_set_parm_config(mm_stream_t *my_obj,
+ void *value);
+int32_t mm_stream_get_parm_config(mm_stream_t *my_obj,
+ void *value);
+int32_t mm_stream_set_parm_start(mm_stream_t *my_obj,
+ void *value);
+int32_t mm_stream_get_parm_start(mm_stream_t *my_obj,
+ void *value);
+int32_t mm_stream_streamon(mm_stream_t *my_obj);
+int32_t mm_stream_streamoff(mm_stream_t *my_obj);
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info);
+int32_t mm_stream_config(mm_stream_t *my_obj,
+ mm_camera_stream_config_t *config);
+uint8_t mm_stream_need_stream_on(mm_stream_t *my_obj);
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj);
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *frame);
+
+
+/* state machine function declarations: one handler per stream FSM state */
+int32_t mm_stream_fsm_inited(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_acquired(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_active_stream_on(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+int32_t mm_stream_fsm_active_stream_off(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val);
+
+/* implemented in the camera-object layer; used by stream code for native ctrl cmds */
+extern int32_t mm_camera_send_native_ctrl_cmd(mm_camera_obj_t * my_obj,
+ cam_ctrl_type type,
+ uint32_t length,
+ void *value);
+
+/* Query the driver for this stream's mctl instance handle via a private
+ * V4L2 get-control ioctl and cache it in my_obj->inst_hdl.
+ * Returns 0 on success, the non-zero ioctl return value on failure. */
+static int get_stream_inst_handle(mm_stream_t *my_obj)
+{
+ int rc = 0;
+ uint32_t inst_handle;
+ struct msm_camera_v4l2_ioctl_t v4l2_ioctl;
+
+ v4l2_ioctl.id = MSM_V4L2_PID_INST_HANDLE;
+ v4l2_ioctl.ioctl_ptr = &inst_handle;
+ v4l2_ioctl.len = sizeof(inst_handle);
+ rc = ioctl(my_obj->fd, MSM_CAM_V4L2_IOCTL_PRIVATE_G_CTRL, &v4l2_ioctl);
+ if (rc) {
+ CDBG_ERROR("%s Error getting mctl pp inst handle", __func__);
+ return rc;
+ }
+
+ my_obj->inst_hdl = inst_handle;
+ CDBG("%s: inst handle = %x rc = %d\n", __func__,
+ my_obj->inst_hdl, rc);
+ return rc;
+}
+
+/* Hand a newly received buffer off to the consumers:
+ * - if the stream is bundled, enqueue a copy of buf_info to the channel's
+ *   cmd thread so it can be matched into a super buffer;
+ * - if any per-stream data callback is registered, enqueue another copy to
+ *   the stream's own cmd thread for CB dispatch.
+ * Each enqueue allocates its own mm_camera_cmdcb_t node and wakes the
+ * target thread via sem_post. */
+void mm_stream_handle_rcvd_buf(mm_stream_t *my_obj,
+ mm_camera_buf_info_t *buf_info)
+{
+ int32_t i;
+ uint8_t has_cb = 0;
+
+ /* enqueue to super buf thread */
+ if (my_obj->is_bundled) {
+ mm_camera_cmdcb_t* node = NULL;
+
+ /* send sem_post to wake up channel cmd thread to enqueue to super buffer */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+ memcpy(&node->u.buf, buf_info, sizeof(mm_camera_buf_info_t));
+
+ /* enqueue to cmd thread */
+ mm_camera_queue_enq(&(my_obj->ch_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ sem_post(&(my_obj->ch_obj->cmd_thread.cmd_sem));
+ } else {
+ CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+ }
+ }
+
+ /* check if has CB; a single registered CB is enough to dispatch once */
+ for (i=0 ; i< MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb) {
+ has_cb = 1;
+ }
+ }
+ if(has_cb) {
+ mm_camera_cmdcb_t* node = NULL;
+
+ /* send sem_post to wake up cmd thread to dispatch dataCB */
+ node = (mm_camera_cmdcb_t *)malloc(sizeof(mm_camera_cmdcb_t));
+ if (NULL != node) {
+ memset(node, 0, sizeof(mm_camera_cmdcb_t));
+ node->cmd_type = MM_CAMERA_CMD_TYPE_DATA_CB;
+ memcpy(&node->u.buf, buf_info, sizeof(mm_camera_buf_info_t));
+
+ /* enqueue to cmd thread */
+ mm_camera_queue_enq(&(my_obj->cmd_thread.cmd_queue), node);
+
+ /* wake up cmd thread */
+ sem_post(&(my_obj->cmd_thread.cmd_sem));
+ } else {
+ CDBG_ERROR("%s: No memory for mm_camera_node_t", __func__);
+ }
+ }
+}
+
+/* Poll-thread callback fired when the stream fd becomes readable while
+ * streaming (ACTIVE_STREAM_ON). Dequeues one frame from the kernel,
+ * updates the buffer bookkeeping (refcounts / in_kernel) under buf_lock,
+ * then forwards the frame via mm_stream_handle_rcvd_buf(). */
+static void mm_stream_data_notify(void* user_data)
+{
+ mm_stream_t *my_obj = (mm_stream_t*)user_data;
+ int32_t idx = -1, i, rc;
+ /* NOTE(review): has_cb is set below but never read afterwards */
+ uint8_t has_cb = 0;
+ mm_camera_buf_info_t buf_info;
+
+ if (NULL == my_obj) {
+ return;
+ }
+
+ if (MM_STREAM_STATE_ACTIVE_STREAM_ON != my_obj->state) {
+ /* this Cb will only received in active_stream_on state
+ * if not so, return here */
+ CDBG_ERROR("%s: ERROR!! Wrong state (%d) to receive data notify!",
+ __func__, my_obj->state);
+ return;
+ }
+
+ memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ rc = mm_stream_read_msm_frame(my_obj, &buf_info);
+ if (rc != 0) {
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ return;
+ }
+ idx = buf_info.buf->buf_idx;
+
+ /* update buffer location: frame is now owned by userspace */
+ my_obj->buf_status[idx].in_kernel = 0;
+
+ /* update buf ref count */
+ if (my_obj->is_bundled) {
+ /* need to add into super buf since bundled, add ref count */
+ my_obj->buf_status[idx].buf_refcnt++;
+ }
+
+ for (i=0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb) {
+ /* for every CB, add ref count */
+ my_obj->buf_status[idx].buf_refcnt++;
+ has_cb = 1;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ mm_stream_handle_rcvd_buf(my_obj, &buf_info);
+}
+
+/* special function for dataCB registered at other stream:
+ * used when this stream has no stream-on of its own (ACTIVE_STREAM_OFF).
+ * Copies the metadata of the first buffer of the incoming super buffer
+ * into this stream's local buf[0], requeues the original buffer back to
+ * its channel, and then processes the local copy as a received frame. */
+static void mm_stream_buf_notify(mm_camera_super_buf_t *super_buf,
+ void *user_data)
+{
+ mm_stream_t * my_obj = (mm_stream_t*)user_data;
+ mm_camera_buf_info_t buf_info;
+ int8_t i; /* NOTE(review): unused */
+ mm_camera_buf_def_t *buf = super_buf->bufs[0];
+
+ CDBG("%s : E",__func__);
+ if (my_obj == NULL) {
+ return;
+ }
+
+ if (MM_STREAM_STATE_ACTIVE_STREAM_OFF != my_obj->state) {
+ /* this CB will only received in active_stream_off state
+ * if not so, return here */
+ return;
+ }
+
+ /* 1) copy buf into local buf */
+ if (my_obj->buf_num <= 0) {
+ CDBG_ERROR("%s: Local buf is not allocated yet", __func__);
+ return;
+ }
+
+ my_obj->buf[0].buf_idx = 0;
+ my_obj->buf[0].stream_id = my_obj->my_hdl;
+ my_obj->buf[0].frame_idx = buf->frame_idx;
+
+ memcpy(&my_obj->buf[0].ts, &buf->ts, sizeof(buf->ts));
+
+ memcpy(&my_obj->buf[0].planes,&buf->planes,buf->num_planes * sizeof(struct v4l2_plane));
+
+ /* set flag to indicate buf be to sent out is from local */
+ my_obj->is_local_buf = 1;
+
+ /* 2) buf_done the buf from other stream */
+ mm_channel_qbuf(my_obj->ch_obj, buf);
+
+ /* 3) handle received buf */
+ memset(&buf_info, 0, sizeof(mm_camera_buf_info_t));
+ buf_info.frame_idx =my_obj->buf[0].frame_idx;
+ buf_info.buf = &my_obj->buf[0];
+ buf_info.stream_id = my_obj->my_hdl;
+ mm_stream_handle_rcvd_buf(my_obj, &buf_info);
+}
+
+/* Cmd-thread worker: wraps the queued buffer in a single-buf super buffer
+ * and invokes every registered data callback under cb_lock. Callbacks with
+ * a positive cb_count are one-shot-style: the count is decremented per call
+ * and the slot is cleared when it reaches 0; negative counts mean infinite. */
+static void mm_stream_dispatch_app_data(mm_camera_cmdcb_t *cmd_cb,
+ void* user_data)
+{
+ int i;
+ mm_stream_t * my_obj = (mm_stream_t *)user_data;
+ mm_camera_buf_info_t* buf_info = NULL;
+ mm_camera_super_buf_t super_buf;
+
+ if (NULL == my_obj) {
+ return;
+ }
+
+ if (MM_CAMERA_CMD_TYPE_DATA_CB != cmd_cb->cmd_type) {
+ CDBG_ERROR("%s: Wrong cmd_type (%d) for dataCB",
+ __func__, cmd_cb->cmd_type);
+ return;
+ }
+
+ buf_info = &cmd_cb->u.buf;
+ memset(&super_buf, 0, sizeof(mm_camera_super_buf_t));
+ super_buf.num_bufs = 1;
+ super_buf.bufs[0] = buf_info->buf;
+ super_buf.camera_handle = my_obj->ch_obj->cam_obj->my_hdl;
+ super_buf.ch_id = my_obj->ch_obj->my_hdl;
+
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for(i = 0; i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL != my_obj->buf_cb[i].cb) {
+ if (my_obj->buf_cb[i].cb_count != 0) {
+ /* if <0, means infinite CB
+ * if >0, means CB for certain times
+ * both case we need to call CB */
+ my_obj->buf_cb[i].cb(&super_buf,
+ my_obj->buf_cb[i].user_data);
+ }
+
+ /* if >0, reduce count by 1 every time we called CB until reaches 0
+ * when count reach 0, reset the buf_cb to have no CB */
+ if (my_obj->buf_cb[i].cb_count > 0) {
+ my_obj->buf_cb[i].cb_count--;
+ if (0 == my_obj->buf_cb[i].cb_count) {
+ my_obj->buf_cb[i].cb = NULL;
+ my_obj->buf_cb[i].user_data = NULL;
+ }
+ }
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+}
+
+/* state machine entry: routes an event to the handler for the stream's
+ * current state. Returns the handler's result, or -1 for unused/unknown
+ * states. */
+int32_t mm_stream_fsm_fn(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = -1;
+
+ CDBG("%s : E - evt = %d, my_obj->state = %d",__func__,evt,my_obj->state);
+ switch (my_obj->state) {
+ case MM_STREAM_STATE_NOTUSED:
+ CDBG("%s: Not handling evt in unused state", __func__);
+ break;
+ case MM_STREAM_STATE_INITED:
+ rc = mm_stream_fsm_inited(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_ACQUIRED:
+ rc = mm_stream_fsm_acquired(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_CFG:
+ rc = mm_stream_fsm_cfg(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_BUFFED:
+ rc = mm_stream_fsm_buffed(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_REG:
+ rc = mm_stream_fsm_reg(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_ACTIVE_STREAM_ON:
+ rc = mm_stream_fsm_active_stream_on(my_obj, evt, in_val, out_val);
+ break;
+ case MM_STREAM_STATE_ACTIVE_STREAM_OFF:
+ rc = mm_stream_fsm_active_stream_off(my_obj, evt, in_val, out_val);
+ break;
+ default:
+ CDBG("%s: Not a valid state (%d)", __func__, my_obj->state);
+ break;
+ }
+ CDBG("%s : X rc =%d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_STREAM_STATE_INITED.
+ * Only ACQUIRE is valid here: opens the camera device node non-blocking,
+ * sets the extended image mode, fetches the instance handle, and moves the
+ * stream to ACQUIRED on success. The fd is closed again on any failure. */
+int32_t mm_stream_fsm_inited(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ char dev_name[MM_CAMERA_DEV_NAME_LEN];
+
+ CDBG("%s :E evt = %d",__func__,evt);
+ switch(evt) {
+ case MM_STREAM_EVT_ACQUIRE:
+ if ((NULL == my_obj->ch_obj) || (NULL == my_obj->ch_obj->cam_obj)) {
+ CDBG_ERROR("%s: NULL channel or camera obj\n", __func__);
+ rc = -1;
+ break;
+ }
+
+ snprintf(dev_name, sizeof(dev_name), "/dev/%s",
+ mm_camera_util_get_dev_name(my_obj->ch_obj->cam_obj->my_hdl));
+
+ my_obj->fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ /* NOTE(review): fd 0 is treated as failure here */
+ if (my_obj->fd <= 0) {
+ CDBG_ERROR("%s: open dev returned %d\n", __func__, my_obj->fd);
+ rc = -1;
+ break;
+ }
+ CDBG("%s: open dev fd = %d, ext_image_mode = %d, sensor_idx = %d\n",
+ __func__, my_obj->fd, my_obj->ext_image_mode, my_obj->sensor_idx);
+ rc = mm_stream_set_ext_mode(my_obj);
+ if (0 == rc) {
+ my_obj->state = MM_STREAM_STATE_ACQUIRED;
+ } else {
+ /* failed setting ext_mode
+ * close fd */
+ if(my_obj->fd > 0) {
+ close(my_obj->fd);
+ my_obj->fd = -1;
+ }
+ break;
+ }
+ rc = get_stream_inst_handle(my_obj);
+ if(rc) {
+ if(my_obj->fd > 0) {
+ close(my_obj->fd);
+ my_obj->fd = -1;
+ }
+ }
+ break;
+ default:
+ CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d",
+ __func__,evt,my_obj->state);
+ rc = -1;
+ break;
+ }
+ return rc;
+}
+
+/* FSM handler for MM_STREAM_STATE_ACQUIRED.
+ * SET_FMT configures the stream and moves to CFG; RELEASE tears the stream
+ * down to NOTUSED; SET_PARM/GET_PARM are delegated to the acquire-stage
+ * parameter helpers. */
+int32_t mm_stream_fsm_acquired(mm_stream_t *my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+
+ CDBG("%s :E evt = %d",__func__,evt);
+ switch(evt) {
+ case MM_STREAM_EVT_SET_FMT:
+ {
+ mm_camera_stream_config_t *config =
+ (mm_camera_stream_config_t *)in_val;
+
+ rc = mm_stream_config(my_obj, config);
+
+ /* change state to configured; NOTE(review): done even if config failed */
+ my_obj->state = MM_STREAM_STATE_CFG;
+
+ break;
+ }
+ case MM_STREAM_EVT_RELEASE:
+ rc = mm_stream_release(my_obj);
+ /* change state to not used */
+ my_obj->state = MM_STREAM_STATE_NOTUSED;
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ rc = mm_stream_set_parm_acquire(my_obj, in_val);
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ rc = mm_stream_get_parm_acquire(my_obj,out_val);
+ break;
+ default:
+ CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d",
+ __func__, evt, my_obj->state);
+ rc = -1;
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_STREAM_STATE_CFG.
+ * Allows re-configuration (SET_FMT), release, config-stage get/set parm,
+ * and buffer allocation (GET_BUF -> BUFFED on success). */
+int32_t mm_stream_fsm_cfg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ CDBG("%s :E evt = %d",__func__,evt);
+ switch(evt) {
+ case MM_STREAM_EVT_SET_FMT:
+ {
+ mm_camera_stream_config_t *config =
+ (mm_camera_stream_config_t *)in_val;
+
+ rc = mm_stream_config(my_obj, config);
+
+ /* change state to configured */
+ my_obj->state = MM_STREAM_STATE_CFG;
+
+ break;
+ }
+ case MM_STREAM_EVT_RELEASE:
+ rc = mm_stream_release(my_obj);
+ my_obj->state = MM_STREAM_STATE_NOTUSED;
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ rc = mm_stream_set_parm_config(my_obj, in_val);
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ rc = mm_stream_get_parm_config(my_obj, out_val);
+ break;
+ case MM_STREAM_EVT_GET_BUF:
+ rc = mm_stream_init_bufs(my_obj);
+ /* change state to buf allocated */
+ if(0 == rc) {
+ my_obj->state = MM_STREAM_STATE_BUFFED;
+ }
+ break;
+ default:
+ CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d",
+ __func__, evt, my_obj->state);
+ rc = -1;
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_STREAM_STATE_BUFFED.
+ * PUT_BUF frees buffers (back to CFG); REG_BUF registers them with the
+ * kernel (on to REG); parm get/set use the config-stage helpers. */
+int32_t mm_stream_fsm_buffed(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ CDBG("%s :E evt = %d",__func__,evt);
+ switch(evt) {
+ case MM_STREAM_EVT_PUT_BUF:
+ rc = mm_stream_deinit_bufs(my_obj);
+ /* change state back to configured */
+ if(0 == rc) {
+ my_obj->state = MM_STREAM_STATE_CFG;
+ }
+ break;
+ case MM_STREAM_EVT_REG_BUF:
+ rc = mm_stream_reg_buf(my_obj);
+ /* change state to registered */
+ if(0 == rc) {
+ my_obj->state = MM_STREAM_STATE_REG;
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ rc = mm_stream_set_parm_config(my_obj, in_val);
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ rc = mm_stream_get_parm_config(my_obj, out_val);
+ break;
+ default:
+ CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d",
+ __func__, evt, my_obj->state);
+ rc = -1;
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_STREAM_STATE_REG.
+ * START launches the per-stream cmd thread (when a CB is registered) and
+ * either issues STREAMON (-> ACTIVE_STREAM_ON) or, for streams that do not
+ * stream on themselves, registers a one-shot data CB on the video stream
+ * (-> ACTIVE_STREAM_OFF). UNREG_BUF moves back to BUFFED. */
+int32_t mm_stream_fsm_reg(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ CDBG("%s :E evt = %d",__func__,evt);
+
+ switch(evt) {
+ case MM_STREAM_EVT_UNREG_BUF:
+ rc = mm_stream_unreg_buf(my_obj);
+
+ /* change state to buffed */
+ my_obj->state = MM_STREAM_STATE_BUFFED;
+ break;
+ case MM_STREAM_EVT_START:
+ {
+ /* launch cmd thread if CB is not null */
+ if (NULL != my_obj->buf_cb) {
+ mm_camera_cmd_thread_launch(&my_obj->cmd_thread,
+ mm_stream_dispatch_app_data,
+ (void *)my_obj);
+
+ }
+
+ if(mm_stream_need_stream_on(my_obj)) {
+ rc = mm_stream_streamon(my_obj);
+ if (0 != rc) {
+ /* failed stream on, need to release cmd thread if it's launched */
+ if (NULL != my_obj->buf_cb) {
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+
+ }
+ break;
+ }
+ my_obj->state = MM_STREAM_STATE_ACTIVE_STREAM_ON;
+ } else {
+ /* register one time CB at video fd */
+ CDBG("%s : Video Size snapshot Enabled",__func__);
+ mm_stream_data_cb_t cb;
+ memset(&cb, 0, sizeof(mm_stream_data_cb_t));
+ cb.cb_count = 1; /* one time registration */
+ cb.user_data = (void*)my_obj;
+ cb.cb = mm_stream_buf_notify;
+ rc = mm_channel_reg_stream_cb(my_obj->ch_obj, &cb,
+ MSM_V4L2_EXT_CAPTURE_MODE_VIDEO,
+ my_obj->sensor_idx);
+ my_obj->state = MM_STREAM_STATE_ACTIVE_STREAM_OFF;
+ }
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ rc = mm_stream_set_parm_config(my_obj, in_val);
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ rc = mm_stream_get_parm_config(my_obj, out_val);
+ break;
+ default:
+ CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d",
+ __func__, evt, my_obj->state);
+ rc = -1;
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_STREAM_STATE_ACTIVE_STREAM_ON.
+ * QBUF returns a buffer to the kernel; STOP issues STREAMOFF, releases the
+ * cmd thread (if a CB exists) and returns to REG; parm get/set use the
+ * start-stage helpers. */
+int32_t mm_stream_fsm_active_stream_on(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ CDBG("%s :E evt = %d",__func__,evt);
+ switch(evt) {
+ case MM_STREAM_EVT_QBUF:
+ rc = mm_stream_buf_done(my_obj, (mm_camera_buf_def_t *)in_val);
+ break;
+ case MM_STREAM_EVT_STOP:
+ {
+ rc = mm_stream_streamoff(my_obj);
+ if (NULL != my_obj->buf_cb) {
+ mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+
+ }
+ my_obj->state = MM_STREAM_STATE_REG;
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ rc = mm_stream_set_parm_start(my_obj, in_val);
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ rc = mm_stream_get_parm_start(my_obj, out_val);
+ break;
+ default:
+ CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d",
+ __func__, evt, my_obj->state);
+ return -1;
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* FSM handler for MM_STREAM_STATE_ACTIVE_STREAM_OFF (streams fed by a CB
+ * on another stream; no kernel streamon of their own). STOP only releases
+ * the cmd thread — there is no STREAMOFF to undo — then returns to REG. */
+int32_t mm_stream_fsm_active_stream_off(mm_stream_t * my_obj,
+ mm_stream_evt_type_t evt,
+ void * in_val,
+ void * out_val)
+{
+ int32_t rc = 0;
+ CDBG("%s :E evt = %d",__func__,evt);
+ switch(evt) {
+ case MM_STREAM_EVT_QBUF:
+ rc = mm_stream_buf_done(my_obj, (mm_camera_buf_def_t *)in_val);
+ break;
+ case MM_STREAM_EVT_STOP:
+ {
+ if (NULL != my_obj->buf_cb) {
+ rc = mm_camera_cmd_thread_release(&my_obj->cmd_thread);
+
+ }
+ my_obj->state = MM_STREAM_STATE_REG;
+ }
+ break;
+ case MM_STREAM_EVT_SET_PARM:
+ rc = mm_stream_set_parm_config(my_obj, in_val);
+ break;
+ case MM_STREAM_EVT_GET_PARM:
+ rc = mm_stream_get_parm_config(my_obj, out_val);
+ break;
+ default:
+ CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d",
+ __func__, evt, my_obj->state);
+ return -1;
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* Apply a stream configuration: cache the image format, requested buffer
+ * count and need_stream_on flag, query the frame offset/length layout, and
+ * push the format to the kernel only for streams that will stream on. */
+int32_t mm_stream_config(mm_stream_t *my_obj,
+ mm_camera_stream_config_t *config)
+{
+ int32_t rc = 0;
+ memcpy(&my_obj->fmt, &config->fmt, sizeof(mm_camera_image_fmt_t));
+ my_obj->hal_requested_num_bufs = config->num_of_bufs;
+ my_obj->need_stream_on = config->need_stream_on;
+
+ rc = mm_stream_get_offset(my_obj);
+ if(rc != 0) {
+ CDBG_ERROR("%s: Error in offset query",__func__);
+ return rc;
+ }
+ if(mm_stream_need_stream_on(my_obj)) {
+ /* only send fmt to backend if we need streamon */
+ rc = mm_stream_set_fmt(my_obj);
+ }
+ return rc;
+}
+
+/* Release a stream: close its device fd, destroy its mutexes, and zero the
+ * whole object (then mark fd invalid). Always returns 0.
+ * NOTE(review): local rc is declared but never used. */
+int32_t mm_stream_release(mm_stream_t *my_obj)
+{
+ int32_t rc;
+
+ /* close fd */
+ if(my_obj->fd > 0)
+ {
+ close(my_obj->fd);
+ }
+
+ /* destroy mutex */
+ pthread_mutex_destroy(&my_obj->buf_lock);
+ pthread_mutex_destroy(&my_obj->cb_lock);
+
+ /* reset stream obj to a clean, reusable slot */
+ memset(my_obj, 0, sizeof(mm_stream_t));
+ my_obj->fd = -1;
+
+ return 0;
+}
+
+/* Accessor: whether this stream must issue VIDIOC_STREAMON itself
+ * (set from the stream config in mm_stream_config). */
+uint8_t mm_stream_need_stream_on(mm_stream_t *my_obj)
+{
+ return my_obj->need_stream_on;
+}
+
+/* Start streaming: register the stream fd with the channel's data poll
+ * thread first (so no frame is missed), then issue VIDIOC_STREAMON.
+ * On STREAMON failure the fd is removed from the poll thread again. */
+int32_t mm_stream_streamon(mm_stream_t *my_obj)
+{
+ int32_t rc;
+ enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+ CDBG("%s :E",__func__);
+ /* Add fd to data poll thread */
+ rc = mm_camera_poll_thread_add_poll_fd(&my_obj->ch_obj->poll_thread[0],
+ my_obj->my_hdl,
+ my_obj->fd,
+ mm_stream_data_notify,
+ (void*)my_obj);
+ if (rc < 0) {
+ return rc;
+ }
+ rc = ioctl(my_obj->fd, VIDIOC_STREAMON, &buf_type);
+ if (rc < 0) {
+ CDBG_ERROR("%s: ioctl VIDIOC_STREAMON failed: rc=%d\n",
+ __func__, rc);
+ /* remove fd from data poll thread in case of failure */
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0], my_obj->my_hdl);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* Stop streaming: unregister the fd from the data poll thread first so no
+ * notify fires on a stopping stream, then issue VIDIOC_STREAMOFF. */
+int32_t mm_stream_streamoff(mm_stream_t *my_obj)
+{
+ int32_t rc;
+ enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+
+ /* step1: remove fd from data poll thread */
+ mm_camera_poll_thread_del_poll_fd(&my_obj->ch_obj->poll_thread[0], my_obj->my_hdl);
+
+ /* step2: stream off */
+ rc = ioctl(my_obj->fd, VIDIOC_STREAMOFF, &buf_type);
+ if (rc < 0) {
+ CDBG_ERROR("%s: STREAMOFF failed: %s\n",
+ __func__, strerror(errno));
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* Map a cam_format_t to its V4L2 pixel format fourcc and report the plane
+ * count via *num_planes. Unknown formats yield 0 / 0 planes. */
+static uint32_t mm_stream_util_get_v4l2_fmt(cam_format_t fmt,
+ uint8_t *num_planes)
+{
+ uint32_t val;
+ switch(fmt) {
+ case CAMERA_YUV_420_NV12:
+ val = V4L2_PIX_FMT_NV12;
+ *num_planes = 2;
+ break;
+ case CAMERA_YUV_420_NV21:
+ val = V4L2_PIX_FMT_NV21;
+ *num_planes = 2;
+ break;
+ case CAMERA_BAYER_SBGGR10:
+ case CAMERA_RDI:
+ val= V4L2_PIX_FMT_SBGGR10;
+ *num_planes = 1;
+ break;
+ case CAMERA_YUV_422_NV61:
+ val= V4L2_PIX_FMT_NV61;
+ *num_planes = 2;
+ break;
+ default:
+ /* unsupported format: caller sees fourcc 0 and 0 planes */
+ val = 0;
+ *num_planes = 0;
+ break;
+ }
+ return val;
+}
+
+/* Dequeue one multi-planar USERPTR frame from the kernel (VIDIOC_DQBUF)
+ * and fill buf_info (and the stream's own buf entry) with its index,
+ * sequence number, timestamp and per-plane offsets.
+ * Returns the ioctl result: 0 on success, <0 on failure. */
+int32_t mm_stream_read_msm_frame(mm_stream_t * my_obj,
+ mm_camera_buf_info_t* buf_info)
+{
+ /* NOTE(review): this outer idx is never used; the success path declares
+ * its own int8_t idx that shadows it */
+ int32_t idx = -1, rc = 0;
+ struct v4l2_buffer vb;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+ uint32_t i = 0;
+ uint8_t num_planes = 0;
+
+ mm_stream_util_get_v4l2_fmt(my_obj->fmt.fmt,
+ &num_planes);
+
+ memset(&vb, 0, sizeof(vb));
+ vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ vb.memory = V4L2_MEMORY_USERPTR;
+ vb.m.planes = &planes[0];
+ vb.length = num_planes;
+
+ rc = ioctl(my_obj->fd, VIDIOC_DQBUF, &vb);
+ if (rc < 0) {
+ CDBG_ERROR("%s: VIDIOC_DQBUF ioctl call failed (rc=%d)\n",
+ __func__, rc);
+ } else {
+ int8_t idx = vb.index;
+ buf_info->buf = &my_obj->buf[idx];
+ buf_info->frame_idx = vb.sequence;
+ buf_info->stream_id = my_obj->my_hdl;
+ buf_info->need_pp = my_obj->is_pp_needed;
+
+ buf_info->buf->stream_id = my_obj->my_hdl;
+ buf_info->buf->buf_idx = idx;
+ buf_info->buf->frame_idx = vb.sequence;
+ /* v4l2 timestamp is a timeval; convert usec to nsec */
+ buf_info->buf->ts.tv_sec = vb.timestamp.tv_sec;
+ buf_info->buf->ts.tv_nsec = vb.timestamp.tv_usec * 1000;
+
+ for(i = 0; i < vb.length; i++) {
+ CDBG("%s plane %d addr offset: %d data offset:%d\n",
+ __func__, i, vb.m.planes[i].reserved[0],
+ vb.m.planes[i].data_offset);
+ buf_info->buf->planes[i].reserved[0] =
+ vb.m.planes[i].reserved[0];
+ buf_info->buf->planes[i].data_offset =
+ vb.m.planes[i].data_offset;
+ }
+
+
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* Fetch the current crop rectangle from the driver via VIDIOC_G_CROP and
+ * copy it into *crop. Returns the ioctl result (0 on success). */
+int32_t mm_stream_get_crop(mm_stream_t *my_obj,
+ mm_camera_rect_t *crop)
+{
+ struct v4l2_crop crop_info;
+ int32_t rc = 0;
+
+ memset(&crop_info, 0, sizeof(crop_info));
+ crop_info.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ rc = ioctl(my_obj->fd, VIDIOC_G_CROP, &crop_info);
+ if (0 == rc) {
+ crop->left = crop_info.c.left;
+ crop->top = crop_info.c.top;
+ crop->width = crop_info.c.width;
+ crop->height = crop_info.c.height;
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* Set a stream parameter while in ACQUIRED state. Only MM_CAMERA_STREAM_CID
+ * is handled. Always returns 0 (the mm_stream_set_cid result is dropped). */
+int32_t mm_stream_set_parm_acquire(mm_stream_t *my_obj,
+ void *in_value)
+{
+ int32_t rc = 0;
+ mm_evt_paylod_stream_parm_t *payload = (mm_evt_paylod_stream_parm_t *)in_value;
+ mm_camera_stream_parm_t parm_type = payload->parm_type;
+
+ CDBG("%s: parm_type = %d",__func__,(int)parm_type);
+ switch(parm_type) {
+ case MM_CAMERA_STREAM_CID:{
+ /* NOTE(review): casts the payload wrapper itself, not payload->value,
+ * to stream_cid_t* — confirm this is intentional */
+ stream_cid_t *value = (stream_cid_t *)in_value;
+ mm_stream_set_cid(my_obj,value);
+ break;
+ }
+ default:
+ CDBG_ERROR("%s : Parm -%d set is not supported here",__func__,(int)parm_type);
+ break;
+ }
+ return rc;
+}
+/* Get a stream parameter while in ACQUIRED state: CID or crop rectangle.
+ * Result is written through the payload pointer; returns the helper's rc. */
+int32_t mm_stream_get_parm_acquire(mm_stream_t *my_obj,
+ void *out_value)
+{
+ int32_t rc = 0;
+ mm_evt_paylod_stream_parm_t *payload = (mm_evt_paylod_stream_parm_t *)out_value;
+ mm_camera_stream_parm_t parm_type = payload->parm_type;
+
+ CDBG("%s: parm_type = %d",__func__,(int)parm_type);
+ switch(parm_type) {
+ case MM_CAMERA_STREAM_CID:{
+ /* NOTE(review): casts the payload wrapper itself, not payload->value,
+ * to stream_cid_t* — confirm this is intentional */
+ stream_cid_t *value = (stream_cid_t *)out_value;
+ rc = mm_stream_get_cid(my_obj,value);
+ break;
+ }
+ case MM_CAMERA_STREAM_CROP:{
+ mm_camera_rect_t *crop = (mm_camera_rect_t *)out_value;
+ rc = mm_stream_get_crop(my_obj, crop);
+ break;
+ }
+ default:
+ CDBG_ERROR("%s : Parm -%d get is not supported here",__func__,(int)parm_type);
+ break;
+ }
+ return rc;
+}
+
+/* Set a stream parameter in CFG/BUFFED/REG states. Currently no parameter
+ * is supported in these states; logs and returns 0 for everything. */
+int32_t mm_stream_set_parm_config(mm_stream_t *my_obj,
+ void *in_value)
+{
+ int32_t rc = 0;
+ mm_evt_paylod_stream_parm_t *payload = (mm_evt_paylod_stream_parm_t *)in_value;
+ mm_camera_stream_parm_t parm_type = payload->parm_type;
+ void *value = payload->value; /* NOTE(review): unused */
+
+ CDBG("%s: parm_type = %d",__func__,(int)parm_type);
+ switch(parm_type) {
+ default:
+ CDBG_ERROR("%s : Parm -%d set is not supported here",__func__,(int)parm_type);
+ break;
+ }
+ return rc;
+}
+/* Get a stream parameter in CFG/BUFFED/REG states: frame offset layout
+ * (copied into payload->value) or crop rectangle. Returns -1 on a NULL
+ * payload, otherwise the helper's rc (0 for the offset copy). */
+int32_t mm_stream_get_parm_config(mm_stream_t *my_obj,
+ void *out_value)
+{
+ int32_t rc = 0;
+ mm_evt_paylod_stream_parm_t *payload = (mm_evt_paylod_stream_parm_t *)out_value;
+
+ if(payload == NULL) {
+ CDBG_ERROR("%s : Invalid Argument",__func__);
+ return -1;
+ }
+ CDBG("%s: parm_type = %d",__func__,(int)payload->parm_type);
+ switch(payload->parm_type) {
+ case MM_CAMERA_STREAM_OFFSET:
+ memcpy(payload->value,(void *)&my_obj->frame_offset,sizeof(mm_camera_frame_len_offset));
+ break;
+ case MM_CAMERA_STREAM_CROP:{
+ mm_camera_rect_t *crop = (mm_camera_rect_t *)payload->value;
+ rc = mm_stream_get_crop(my_obj, crop);
+ break;
+ }
+ default:
+ CDBG_ERROR("%s : Parm -%d get is not supported here",__func__,(int)payload->parm_type);
+ break;
+ }
+ return rc;
+}
+
+/* Set a stream parameter while streaming (ACTIVE_STREAM_ON). Currently no
+ * parameter is supported; logs and returns 0 for everything. */
+int32_t mm_stream_set_parm_start(mm_stream_t *my_obj,
+ void *in_value)
+{
+ int32_t rc = 0;
+ mm_evt_paylod_stream_parm_t *payload = (mm_evt_paylod_stream_parm_t *)in_value;
+ mm_camera_stream_parm_t parm_type = payload->parm_type;
+ void *value = payload->value; /* NOTE(review): unused */
+
+ CDBG("%s: parm_type = %d",__func__,(int)parm_type);
+ switch(parm_type) {
+ default:
+ CDBG_ERROR("%s : Parm -%d set is not supported here",__func__,(int)parm_type);
+ break;
+ }
+ return rc;
+}
+/* Get a stream parameter while streaming (ACTIVE_STREAM_ON): frame offset
+ * layout or crop rectangle — same contract as mm_stream_get_parm_config. */
+int32_t mm_stream_get_parm_start(mm_stream_t *my_obj,
+ void *out_value)
+{
+ int32_t rc = 0;
+ mm_evt_paylod_stream_parm_t *payload = (mm_evt_paylod_stream_parm_t *)out_value;
+
+ if(payload == NULL) {
+ CDBG_ERROR("%s : Invalid Argument",__func__);
+ return -1;
+ }
+ CDBG("%s: parm_type = %d",__func__,(int)payload->parm_type);
+ switch(payload->parm_type) {
+ case MM_CAMERA_STREAM_OFFSET:
+ memcpy(payload->value,(void *)&my_obj->frame_offset,sizeof(mm_camera_frame_len_offset));
+ break;
+ case MM_CAMERA_STREAM_CROP:{
+ mm_camera_rect_t *crop = (mm_camera_rect_t *)payload->value;
+ rc = mm_stream_get_crop(my_obj, crop);
+ break;
+ }
+ default:
+ CDBG_ERROR("%s : Parm -%d get is not supported here",__func__,(int)payload->parm_type);
+ break;
+ }
+ return rc;
+}
+/* Tell the driver which extended capture mode this stream uses via
+ * VIDIOC_S_PARM. Returns the ioctl result.
+ * NOTE(review): s_parm is not zero-initialized; only .type and
+ * .parm.capture.extendedmode are set — presumably the driver ignores the
+ * remaining fields, but confirm. */
+int32_t mm_stream_set_ext_mode(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_streamparm s_parm;
+
+ s_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ s_parm.parm.capture.extendedmode = my_obj->ext_image_mode;
+
+ rc = ioctl(my_obj->fd, VIDIOC_S_PARM, &s_parm);
+ CDBG("%s:stream fd=%d, rc=%d, extended_mode=%d\n",
+ __func__, my_obj->fd, rc,
+ s_parm.parm.capture.extendedmode);
+ return rc;
+}
+
+/* Queue a USERPTR multi-planar buffer back to the kernel (VIDIOC_QBUF).
+ * Returns the ioctl result.
+ * NOTE(review): locals i and ret are declared but never used. */
+int32_t mm_stream_qbuf(mm_stream_t *my_obj, mm_camera_buf_def_t *buf)
+{
+ int32_t i, rc = 0;
+ int *ret;
+ struct v4l2_buffer buffer;
+
+ memset(&buffer, 0, sizeof(buffer));
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ buffer.memory = V4L2_MEMORY_USERPTR;
+ buffer.index = buf->buf_idx;
+ buffer.m.planes = &buf->planes[0];
+ buffer.length = buf->num_planes;
+
+ CDBG("%s:stream_hdl=%d,fd=%d,frame idx=%d,num_planes = %d\n", __func__,
+ buf->stream_id, buf->fd, buffer.index, buffer.length);
+
+ rc = ioctl(my_obj->fd, VIDIOC_QBUF, &buffer);
+ CDBG("%s: qbuf idx:%d, rc:%d", __func__, buffer.index, rc);
+ return rc;
+}
+
+/* This function lets the kernel know how many buffers will be registered:
+ * issues VIDIOC_REQBUFS for buf_num USERPTR buffers, but only when at least
+ * one buffer carries initial_reg_flag; otherwise it is a no-op returning 0.
+ * Returns -1 if buf_num exceeds MM_CAMERA_MAX_NUM_FRAMES. */
+int32_t mm_stream_request_buf(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ uint8_t i,reg = 0;
+ struct v4l2_requestbuffers bufreq;
+ uint8_t buf_num = my_obj->buf_num;
+
+ if(buf_num > MM_CAMERA_MAX_NUM_FRAMES) {
+ CDBG_ERROR("%s: buf num %d > max limit %d\n",
+ __func__, buf_num, MM_CAMERA_MAX_NUM_FRAMES);
+ return -1;
+ }
+ pthread_mutex_lock(&my_obj->buf_lock);
+ for(i = 0; i < buf_num; i++){
+ if (my_obj->buf_status[i].initial_reg_flag){
+ reg = 1;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ if(!reg) {
+ /* No need to register a buffer; NOTE(review): logged via CDBG_ERROR
+ * although it is not an error path */
+ CDBG_ERROR("No Need to register this buffer");
+ return rc;
+ }
+ memset(&bufreq, 0, sizeof(bufreq));
+ bufreq.count = buf_num;
+ bufreq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ bufreq.memory = V4L2_MEMORY_USERPTR;
+ rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+ if (rc < 0) {
+ CDBG_ERROR("%s: fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d\n",
+ __func__, my_obj->fd, rc);
+ }
+ CDBG("%s :X rc = %d",__func__,rc);
+ return rc;
+}
+
+/* Compute per-plane lengths/offsets and total frame length for the given
+ * format/dimensions, using hardcoded padding rules per output type
+ * (V = video: 2K padding; P = preview: word padding; else 16-row-ceiled).
+ * Always returns 0; unknown formats get frame_len = 0. */
+int32_t mm_stream_get_frame_len_offset(mm_camera_image_fmt_t* img_fmt,
+ camera_mode_t mode,
+ int image_type,
+ mm_camera_frame_len_offset * frame_offset)
+{
+ /* TODO : this function should query the frame len from backend using cmd */
+ /* for now, it's still use hardcoded value */
+ uint32_t width, height;
+ int local_height;
+
+ width = img_fmt->width;
+ height = img_fmt->height;
+
+ switch (img_fmt->fmt) {
+ case CAMERA_YUV_420_NV12:
+ case CAMERA_YUV_420_NV21:
+ frame_offset->num_planes = 2;
+ if (image_type == OUTPUT_TYPE_V) {
+ frame_offset->mp[0].len = PAD_TO_2K(width * height);
+ frame_offset->mp[1].len = PAD_TO_2K(width * height/2);
+ /* TODO: offset to count in meta header*/
+ frame_offset->mp[0].offset = 0;
+ frame_offset->mp[1].offset = 0;
+ } else if (image_type == OUTPUT_TYPE_P) {
+ frame_offset->mp[0].len = PAD_TO_WORD(width * height);
+ frame_offset->mp[1].len = PAD_TO_WORD(width * height/2);
+ /* TODO: offset to count in meta header*/
+ frame_offset->mp[0].offset = 0;
+ frame_offset->mp[1].offset = 0;
+ } else {
+ frame_offset->mp[0].len = PAD_TO_WORD(width * CEILING16(height));
+ frame_offset->mp[1].len = PAD_TO_WORD(width * CEILING16(height)/2);
+ /* TODO: offset to count in meta header*/
+ frame_offset->mp[0].offset = 0;
+ frame_offset->mp[1].offset = 0;
+ }
+ frame_offset->frame_len =
+ PAD_TO_4K(frame_offset->mp[0].len + frame_offset->mp[1].len);
+ break;
+ case CAMERA_BAYER_SBGGR10:
+ frame_offset->num_planes = 1;
+ frame_offset->mp[0].len = PAD_TO_WORD(width * height);
+ frame_offset->frame_len = frame_offset->mp[0].len;
+ break;
+ case CAMERA_YUV_422_NV16:
+ case CAMERA_YUV_422_NV61:
+ /* NOTE(review): local_height is computed here but the plane length
+ * calculations below use 'height' — confirm whether this is intended */
+ if( image_type == OUTPUT_TYPE_S || image_type == OUTPUT_TYPE_V) {
+ local_height = CEILING16(height);
+ } else {
+ local_height = height;
+ }
+ frame_offset->num_planes = 2;
+ frame_offset->mp[0].len = PAD_TO_WORD(width * height);
+ frame_offset->mp[1].len = PAD_TO_WORD(width * height);
+ /* TODO: offset to count in meta header*/
+ frame_offset->mp[0].offset = 0;
+ frame_offset->mp[1].offset = 0;
+ frame_offset->frame_len =
+ frame_offset->mp[0].len + frame_offset->mp[1].len;
+ break;
+ default:
+ CDBG("%s: format %d not supported.\n",
+ __func__, img_fmt->fmt);
+ frame_offset->frame_len = 0;
+ }
+ CDBG("%s:fmt=%d,image_type=%d,width=%d,height=%d,frame_len=%d\n",
+ __func__, img_fmt->fmt, image_type, width, height, frame_offset->frame_len);
+ return 0;
+}
+
+int32_t mm_stream_init_bufs(mm_stream_t * my_obj)
+{
+ int32_t i, rc = 0, j;
+ int image_type;
+ mm_camear_mem_vtbl_t *mem_vtbl = NULL;
+ mm_camera_frame_len_offset frame_offset;
+ uint8_t *reg_flags = NULL;
+
+ /* deinit buf if it's not NULL*/
+ if (NULL != my_obj->buf) {
+ mm_stream_deinit_bufs(my_obj);
+ }
+
+ my_obj->buf_num = my_obj->hal_requested_num_bufs;
+ /* if stream needs do pp, allocate extra one buf for pp*/
+ if (my_obj->is_pp_needed) {
+ my_obj->buf_num++;
+ }
+
+ my_obj->buf =
+ (mm_camera_buf_def_t*)malloc(sizeof(mm_camera_buf_def_t) * my_obj->buf_num);
+ my_obj->buf_status =
+ (mm_stream_buf_status_t*)malloc(sizeof(mm_stream_buf_status_t) * my_obj->buf_num);
+ reg_flags = (uint8_t *)malloc(sizeof(uint8_t) * my_obj->buf_num);
+
+ if (NULL == my_obj->buf ||
+ NULL == my_obj->buf_status ||
+ NULL == reg_flags) {
+ CDBG_ERROR("%s: No memory for buf", __func__);
+ rc = -1;
+ goto error_malloc;
+ }
+
+ memset(my_obj->buf, 0, sizeof(mm_camera_buf_def_t) * my_obj->buf_num);
+ memset(my_obj->buf_status, 0, sizeof(mm_stream_buf_status_t) * my_obj->buf_num);
+ memset(reg_flags, 0, sizeof(uint8_t) * my_obj->buf_num);
+
+ mem_vtbl = my_obj->ch_obj->cam_obj->mem_vtbl;
+ rc = mem_vtbl->get_buf(my_obj->ch_obj->cam_obj->my_hdl,
+ my_obj->ch_obj->my_hdl,
+ my_obj->my_hdl,
+ mem_vtbl->user_data,
+ &my_obj->frame_offset,
+ my_obj->buf_num,
+ reg_flags,
+ my_obj->buf);
+
+ if (0 != rc) {
+ CDBG_ERROR("%s: Error get buf, rc = %d\n", __func__, rc);
+ goto error_malloc;
+ }
+
+ for (i=0; i < my_obj->buf_num; i++) {
+ my_obj->buf_status[i].initial_reg_flag = reg_flags[i];
+ my_obj->buf[i].stream_id = my_obj->my_hdl;
+ }
+
+ free(reg_flags);
+ reg_flags = NULL;
+
+ for (i=0; i < my_obj->buf_num; i++) {
+ if (my_obj->buf[i].fd > 0) {
+ if(0 >= (rc = mm_camera_map_buf(my_obj->ch_obj->cam_obj,
+ my_obj->ext_image_mode,
+ i,
+ my_obj->buf[i].fd,
+ my_obj->buf[i].frame_len)))
+ {
+ CDBG_ERROR("%s: Error mapping buf (rc = %d)", __func__, rc);
+ goto error_map;
+ }
+ } else {
+ CDBG_ERROR("%s: Invalid fd for buf idx (%d)", __func__, i);
+ }
+ }
+
+ return 0;
+
+error_map:
+ /* error, unmapping previously mapped bufs */
+ for (j=0; j<i; j++) {
+ if (my_obj->buf[j].fd > 0) {
+ mm_camera_unmap_buf(my_obj->ch_obj->cam_obj,
+ my_obj->ext_image_mode,
+ j);
+ }
+ }
+
+ /* put buf back */
+ mem_vtbl->put_buf(my_obj->ch_obj->cam_obj->my_hdl,
+ my_obj->ch_obj->my_hdl,
+ my_obj->my_hdl,
+ mem_vtbl->user_data,
+ my_obj->buf_num,
+ my_obj->buf);
+
+error_malloc:
+ if (NULL != my_obj->buf) {
+ free(my_obj->buf);
+ my_obj->buf = NULL;
+ }
+ if (NULL != my_obj->buf_status) {
+ free(my_obj->buf_status);
+ my_obj->buf_status = NULL;
+ }
+ if (NULL != reg_flags) {
+ free(reg_flags);
+ reg_flags = NULL;
+ }
+
+ return rc;
+}
+
+/* return buffers to surface or release buffers allocated */
+int32_t mm_stream_deinit_bufs(mm_stream_t * my_obj)
+{
+ int32_t rc = 0, i;
+ mm_camear_mem_vtbl_t *mem_vtbl = NULL;
+
+ if (NULL == my_obj->buf) {
+ CDBG("%s: Buf is NULL, no need to deinit", __func__);
+ return rc;
+ }
+
+ /* IOMMU unmapping */
+ for (i=0; i<my_obj->buf_num; i++) {
+ if (my_obj->buf[i].fd > 0) {
+ rc = mm_camera_unmap_buf(my_obj->ch_obj->cam_obj,
+ my_obj->ext_image_mode,
+ i);
+ if (rc < 0 ) {
+ CDBG_ERROR("%s: Error unmapping bufs at idx(%d) rc=%d",
+ __func__, i, rc);
+ }
+ }
+ }
+
+ /* release bufs */
+ mem_vtbl = my_obj->ch_obj->cam_obj->mem_vtbl;
+ if (NULL != mem_vtbl) {
+ rc = mem_vtbl->put_buf(my_obj->ch_obj->cam_obj->my_hdl,
+ my_obj->ch_obj->my_hdl,
+ my_obj->my_hdl,
+ mem_vtbl->user_data,
+ my_obj->buf_num,
+ my_obj->buf);
+ } else {
+ CDBG_ERROR("%s: mem table is NULL, cannot release buf", __func__);
+ rc = -1;
+ }
+
+ free(my_obj->buf);
+ my_obj->buf = NULL;
+ free(my_obj->buf_status);
+ my_obj->buf_status = NULL;
+
+ return rc;
+}
+
+int32_t mm_stream_reg_buf(mm_stream_t * my_obj)
+{
+ int32_t rc = 0;
+ uint8_t i;
+
+ rc = mm_stream_request_buf(my_obj);
+ if (rc != 0) {
+ return rc;
+ }
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ for(i = 0; i < my_obj->buf_num; i++){
+ my_obj->buf[i].buf_idx = i;
+
+ /* check if need to qbuf initially */
+ if (my_obj->buf_status[i].initial_reg_flag) {
+ rc = mm_stream_qbuf(my_obj, &my_obj->buf[i]);
+ if (rc != 0) {
+ CDBG_ERROR("%s: VIDIOC_QBUF rc = %d\n", __func__, rc);
+ return rc;
+ }
+ }
+
+ my_obj->buf_status[i].buf_refcnt = 0;
+ my_obj->buf_status[i].in_kernel = 1;
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ return rc;
+}
+
+int32_t mm_stream_unreg_buf(mm_stream_t * my_obj)
+{
+ struct v4l2_requestbuffers bufreq;
+ int32_t i, rc = 0,reg = 0;
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if (NULL != my_obj->buf_status) {
+ for(i = 0; i < my_obj->buf_num; i++){
+ if (my_obj->buf_status[i].initial_reg_flag){
+ reg = 1;
+ break;
+ }
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ if(!reg) {
+ //No need to unregister a buffer
+ goto end;
+ }
+ bufreq.count = 0;
+ bufreq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ bufreq.memory = V4L2_MEMORY_USERPTR;
+ rc = ioctl(my_obj->fd, VIDIOC_REQBUFS, &bufreq);
+ if (rc < 0) {
+ CDBG_ERROR("%s: fd=%d, VIDIOC_REQBUFS failed, rc=%d\n",
+ __func__, my_obj->fd, rc);
+ }
+
+end:
+ /* reset buf reference count */
+ pthread_mutex_lock(&my_obj->buf_lock);
+ if (NULL != my_obj->buf_status) {
+ for(i = 0; i < my_obj->buf_num; i++){
+ my_obj->buf_status[i].buf_refcnt = 0;
+ my_obj->buf_status[i].in_kernel = 0;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+
+ return rc;
+}
+
+int32_t mm_stream_set_fmt(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ struct v4l2_format fmt;
+
+ if(my_obj->fmt.width == 0 || my_obj->fmt.height == 0) {
+ CDBG_ERROR("%s:invalid input[w=%d,h=%d,fmt=%d]\n",
+ __func__, my_obj->fmt.width, my_obj->fmt.height, my_obj->fmt.fmt);
+ return -1;
+ }
+
+ memset(&fmt, 0, sizeof(fmt));
+ fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ fmt.fmt.pix_mp.width = my_obj->fmt.width;
+ fmt.fmt.pix_mp.height= my_obj->fmt.height;
+ fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
+ fmt.fmt.pix_mp.pixelformat =
+ mm_stream_util_get_v4l2_fmt(my_obj->fmt.fmt,
+ &(fmt.fmt.pix_mp.num_planes));
+ rc = ioctl(my_obj->fd, VIDIOC_S_FMT, &fmt);
+ CDBG("%s:fd=%d, ext_image_mode=%d, rc=%d\n",
+ __func__, my_obj->fd, my_obj->ext_image_mode, rc);
+
+ return rc;
+}
+
+int32_t mm_stream_get_offset(mm_stream_t *my_obj)
+{
+ int32_t rc = 0;
+ cam_frame_resolution_t frame_offset;
+
+ memset(&my_obj->frame_offset,0,sizeof(mm_camera_frame_len_offset));
+
+ frame_offset.format = my_obj->fmt.fmt;
+ frame_offset.image_mode = my_obj->ext_image_mode;
+ frame_offset.rotation = my_obj->fmt.rotation;
+ frame_offset.width = my_obj->fmt.width;
+ frame_offset.height = my_obj->fmt.height;
+
+ switch (my_obj->ext_image_mode) {
+ case MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW:
+ case MSM_V4L2_EXT_CAPTURE_MODE_MAIN:
+ case MSM_V4L2_EXT_CAPTURE_MODE_RAW:
+ case MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL:
+ case MSM_V4L2_EXT_CAPTURE_MODE_RDI:
+ frame_offset.padding_format = CAMERA_PAD_TO_WORD;
+ break;
+ case MSM_V4L2_EXT_CAPTURE_MODE_VIDEO:
+ default:
+ frame_offset.padding_format = CAMERA_PAD_TO_2K;
+ break;
+ }
+
+ CDBG("%s: format = %d, image_mode = %d, padding_format = %d, rotation = %d,"
+ "width = %d height = %d",
+ __func__,frame_offset.format,frame_offset.image_mode,frame_offset.padding_format,
+ frame_offset.rotation,frame_offset.width,frame_offset.height);
+
+ rc = mm_camera_send_native_ctrl_cmd(my_obj->ch_obj->cam_obj,
+ CAMERA_GET_PARM_FRAME_RESOLUTION,
+ sizeof(cam_frame_resolution_t),
+ &frame_offset);
+ if(rc != 0) {
+ CDBG_ERROR("%s: Failed to get the stream offset and frame length",__func__);
+ return rc;
+ }
+ my_obj->fmt.width = frame_offset.width;
+ my_obj->fmt.height = frame_offset.height;
+ memcpy(&my_obj->frame_offset,&frame_offset.frame_offset,sizeof(mm_camera_frame_len_offset));
+ CDBG("%s: Frame length = %d width = %d, height = %d, rc = %d",
+ __func__,my_obj->frame_offset.frame_len,my_obj->fmt.width,my_obj->fmt.height,rc);
+ return rc;
+}
+
+
+int32_t mm_stream_set_cid(mm_stream_t *my_obj,stream_cid_t *value)
+{
+ int32_t rc = 0;
+ cam_cid_info_t cam_cid_info;
+
+ cam_cid_info.num_cids = 1;
+ cam_cid_info.cid_entries[0].cid = value->cid;
+ cam_cid_info.cid_entries[0].dt = value->dt;
+ cam_cid_info.cid_entries[0].inst_handle = my_obj->inst_hdl;
+
+ rc = mm_camera_send_native_ctrl_cmd(my_obj->ch_obj->cam_obj,
+ CAMERA_SET_PARM_CID,
+ sizeof(cam_cid_info_t),
+ &cam_cid_info);
+ if(rc != 0) {
+ CDBG_ERROR("%s: Failed to set the CID",__func__);
+ return rc;
+ }
+ return rc;
+}
+
+int32_t mm_stream_get_cid(mm_stream_t *my_obj,stream_cid_t *out_value)
+{
+ //TODO: Need to use sensor structure init in camera query
+ int32_t rc = 0;
+ return rc;
+}
+
+int32_t mm_stream_buf_done(mm_stream_t * my_obj,
+ mm_camera_buf_def_t *frame)
+{
+ int32_t rc = 0;
+
+ if (my_obj->is_local_buf) {
+ /* special case for video-sized live snapshot
+ * buf is local, no need to qbuf to kernel */
+ return 0;
+ }
+
+ pthread_mutex_lock(&my_obj->buf_lock);
+
+ if(my_obj->buf_status[frame->buf_idx].buf_refcnt == 0) {
+ CDBG("%s: Error Trying to free second time?(idx=%d) count=%d, ext_image_mode=%d\n",
+ __func__, frame->buf_idx,
+ my_obj->buf_status[frame->buf_idx].buf_refcnt,
+ my_obj->ext_image_mode);
+ rc = -1;
+ }else{
+ my_obj->buf_status[frame->buf_idx].buf_refcnt--;
+ if (0 == my_obj->buf_status[frame->buf_idx].buf_refcnt) {
+ CDBG("<DEBUG> : Buf done for buffer:%d:%d", my_obj->ext_image_mode, frame->buf_idx);
+ rc = mm_stream_qbuf(my_obj, frame);
+ if(rc < 0) {
+ CDBG_ERROR("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ __func__, frame->buf_idx, rc);
+ } else {
+ my_obj->buf_status[frame->buf_idx].in_kernel = 1;
+ }
+ }else{
+ CDBG("<DEBUG> : Still ref count pending count :%d",
+ my_obj->buf_status[frame->buf_idx].buf_refcnt);
+ CDBG("<DEBUG> : for buffer:%p:%d, ext_image_mode=%d",
+ my_obj, frame->buf_idx, my_obj->ext_image_mode);
+ }
+ }
+ pthread_mutex_unlock(&my_obj->buf_lock);
+ return rc;
+}
+
+int32_t mm_stream_reg_buf_cb(mm_stream_t *my_obj,
+ mm_stream_data_cb_t *val)
+{
+ int32_t rc = -1;
+ uint8_t i;
+
+ pthread_mutex_lock(&my_obj->cb_lock);
+ for (i=0 ;i < MM_CAMERA_STREAM_BUF_CB_MAX; i++) {
+ if(NULL == my_obj->buf_cb[i].cb) {
+ memcpy(&my_obj->buf_cb[i], val, sizeof(mm_stream_data_cb_t));
+ rc = 0;
+ break;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->cb_lock);
+
+ return rc;
+}
diff --git a/camera/QCamera/stack/mm-camera-interface/src/mm_camera_thread.c b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_thread.c
new file mode 100644
index 0000000..ce69b82
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-interface/src/mm_camera_thread.c
@@ -0,0 +1,362 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <errno.h>
+#include <stdbool.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <semaphore.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_interface.h"
+#include "mm_camera.h"
+
+typedef enum {
+ /* poll entries updated */
+ MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED,
+ /* exit */
+ MM_CAMERA_PIPE_CMD_EXIT,
+ /* max count */
+ MM_CAMERA_PIPE_CMD_MAX
+} mm_camera_pipe_cmd_type_t;
+
+typedef enum {
+ MM_CAMERA_POLL_TASK_STATE_STOPPED,
+ MM_CAMERA_POLL_TASK_STATE_POLL, /* polling pid in polling state. */
+ MM_CAMERA_POLL_TASK_STATE_MAX
+} mm_camera_poll_task_state_type_t;
+
+typedef struct {
+ uint8_t cmd;
+ mm_camera_event_t event;
+} mm_camera_sig_evt_t;
+
+static int32_t mm_camera_poll_sig(mm_camera_poll_thread_t *poll_cb,
+ uint32_t cmd)
+{
+ /* send through pipe */
+ /* get the mutex */
+ mm_camera_sig_evt_t cmd_evt;
+ int len;
+
+ CDBG("%s: E cmd = %d", __func__,cmd);
+ memset(&cmd_evt, 0, sizeof(cmd_evt));
+ cmd_evt.cmd = cmd;
+ pthread_mutex_lock(&poll_cb->mutex);
+ /* reset the status to false */
+ poll_cb->status = false;
+ /* send cmd to worker */
+
+ len = write(poll_cb->pfds[1], &cmd_evt, sizeof(cmd_evt));
+ if(len < 1) {
+ CDBG_ERROR("%s: len = %d, errno = %d", __func__, len, errno);
+ }
+ CDBG("%s: begin IN mutex write done, len = %d", __func__, len);
+ /* wait till worker task gives positive signal */
+ if (false == poll_cb->status) {
+ CDBG("%s: wait", __func__);
+ pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+ }
+ /* done */
+ pthread_mutex_unlock(&poll_cb->mutex);
+ CDBG("%s: X", __func__);
+ return 0;
+}
+
+static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
+{
+ pthread_mutex_lock(&poll_cb->mutex);
+ poll_cb->status = true;
+ pthread_cond_signal(&poll_cb->cond_v);
+ CDBG("%s: done, in mutex", __func__);
+ pthread_mutex_unlock(&poll_cb->mutex);
+}
+
+static void mm_camera_poll_set_state(mm_camera_poll_thread_t *poll_cb,
+ mm_camera_poll_task_state_type_t state)
+{
+ poll_cb->state = state;
+}
+
+static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
+{
+ ssize_t read_len;
+ int i;
+ mm_camera_sig_evt_t cmd_evt;
+ read_len = read(poll_cb->pfds[0], &cmd_evt, sizeof(cmd_evt));
+ CDBG("%s: read_fd = %d, read_len = %d, expect_len = %d cmd = %d",
+ __func__, poll_cb->pfds[0], (int)read_len, (int)sizeof(cmd_evt), cmd_evt.cmd);
+ switch (cmd_evt.cmd) {
+ case MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED:
+ /* we always have index 0 for pipe read */
+ poll_cb->num_fds = 0;
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->pfds[0];
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+
+ if (MM_CAMERA_POLL_TYPE_EVT == poll_cb->poll_type) {
+ if (poll_cb->poll_entries[0].fd > 0) {
+ /* fd is valid, we update poll_fds */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[0].fd;
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+ }
+ } else if (MM_CAMERA_POLL_TYPE_CH == poll_cb->poll_type) {
+ for(i = 0; i < MM_CAMEAR_STRAEM_NUM_MAX; i++) {
+ if(poll_cb->poll_entries[i].fd > 0) {
+ /* fd is valid, we update poll_fds to this fd */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = poll_cb->poll_entries[i].fd;
+ poll_cb->poll_fds[poll_cb->num_fds].events = POLLIN|POLLRDNORM|POLLPRI;
+ poll_cb->num_fds++;
+ } else {
+ /* fd is invalid, we set the entry to -1 to prevent polling.
+ * According to spec, polling will not poll on entry with fd=-1.
+ * If this is not the case, we need to skip these invalid fds
+ * when updating this array.
+ * We still keep fd=-1 in this array because this makes it easier to
+ * map cb associated with this fd once incoming data avail by directly
+ * using the index-1(0 is reserved for pipe read, so need to reduce index by 1) */
+ poll_cb->poll_fds[poll_cb->num_fds].fd = -1;
+ poll_cb->poll_fds[poll_cb->num_fds].events = 0;
+ poll_cb->num_fds++;
+ }
+ }
+ }
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+
+ case MM_CAMERA_PIPE_CMD_EXIT:
+ default:
+ mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_STOPPED);
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+ }
+}
+
+static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
+{
+ int rc = 0, i;
+
+ CDBG("%s: poll type = %d, num_fd = %d poll_cb = %p\n",
+ __func__, poll_cb->poll_type, poll_cb->num_fds,poll_cb);
+ do {
+ for(i = 0; i < poll_cb->num_fds; i++) {
+ poll_cb->poll_fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
+ }
+
+ rc = poll(poll_cb->poll_fds, poll_cb->num_fds, poll_cb->timeoutms);
+ if(rc > 0) {
+ if ((poll_cb->poll_fds[0].revents & POLLIN) &&
+ (poll_cb->poll_fds[0].revents & POLLRDNORM)) {
+ /* if we have data on pipe, we only process pipe in this iteration */
+ CDBG("%s: cmd received on pipe\n", __func__);
+ mm_camera_poll_proc_pipe(poll_cb);
+ } else {
+ for(i=1; i<poll_cb->num_fds; i++) {
+ /* Checking for ctrl events */
+ if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
+ (poll_cb->poll_fds[i].revents & POLLPRI)) {
+ CDBG("%s: mm_camera_evt_notify\n", __func__);
+ if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+ poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+ }
+ }
+
+ if ((poll_cb->poll_type == MM_CAMERA_POLL_TYPE_CH) &&
+ (poll_cb->poll_fds[i].revents & POLLIN) &&
+ (poll_cb->poll_fds[i].revents & POLLRDNORM)) {
+ CDBG("%s: mm_stream_data_notify\n", __func__);
+ if (NULL != poll_cb->poll_entries[i-1].notify_cb) {
+ poll_cb->poll_entries[i-1].notify_cb(poll_cb->poll_entries[i-1].user_data);
+ }
+ }
+ }
+ }
+ } else {
+ /* in error case sleep 10 us and then continue. hard coded here */
+ usleep(10);
+ continue;
+ }
+ } while (poll_cb->state == MM_CAMERA_POLL_TASK_STATE_POLL);
+ return NULL;
+}
+
+static void *mm_camera_poll_thread(void *data)
+{
+ int rc = 0;
+ int i;
+ void *ret = NULL;
+ mm_camera_poll_thread_t *poll_cb = (mm_camera_poll_thread_t *)data;
+
+ /* add pipe read fd into poll first */
+ poll_cb->poll_fds[poll_cb->num_fds++].fd = poll_cb->pfds[0];
+
+ //poll_cb->poll_fds[poll_cb->num_fds++].fd = (((mm_camera_obj_t *)poll_cb->my_obj)->ctrl_fd);
+
+ mm_camera_poll_sig_done(poll_cb);
+ mm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_POLL);
+ ret = mm_camera_poll_fn(poll_cb);
+ return ret;
+}
+
+int32_t mm_camera_poll_thread_notify_entries_updated(mm_camera_poll_thread_t * poll_cb)
+{
+ /* send poll entries updated signal to poll thread */
+ return mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+}
+
+int32_t mm_camera_poll_thread_add_poll_fd(mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler,
+ int32_t fd,
+ mm_camera_poll_notify_t notify_cb,
+ void* userdata)
+{
+ int32_t rc = -1;
+ uint8_t idx = 0;
+
+ if (MM_CAMERA_POLL_TYPE_CH == poll_cb->poll_type) {
+ /* get stream idx from handler if CH type */
+ idx = mm_camera_util_get_index_by_handler(handler);
+ } else {
+ /* for EVT type, only idx=0 is valid */
+ idx = 0;
+ }
+
+ if (MM_CAMEAR_STRAEM_NUM_MAX > idx) {
+ poll_cb->poll_entries[idx].fd = fd;
+ poll_cb->poll_entries[idx].handler = handler;
+ poll_cb->poll_entries[idx].notify_cb = notify_cb;
+ poll_cb->poll_entries[idx].user_data = userdata;
+ /* send poll entries updated signal to poll thread */
+ rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+ } else {
+ CDBG_ERROR("%s: invalid handler %d (%d)",
+ __func__, handler, idx);
+ }
+ return rc;
+}
+
+int32_t mm_camera_poll_thread_del_poll_fd(mm_camera_poll_thread_t * poll_cb,
+ uint32_t handler)
+{
+ int32_t rc = -1;
+ uint8_t idx = 0;
+
+ if (MM_CAMERA_POLL_TYPE_CH == poll_cb->poll_type) {
+ /* get stream idx from handler if CH type */
+ idx = mm_camera_util_get_index_by_handler(handler);
+ } else {
+ /* for EVT type, only idx=0 is valid */
+ idx = 0;
+ }
+
+ if ((MM_CAMEAR_STRAEM_NUM_MAX > idx) &&
+ (handler == poll_cb->poll_entries[idx].handler)) {
+ /* reset poll entry */
+ poll_cb->poll_entries[idx].fd = -1; /* set fd to invalid */
+ poll_cb->poll_entries[idx].handler = 0;
+ poll_cb->poll_entries[idx].notify_cb = NULL;
+
+ /* send poll entries updated signal to poll thread */
+ rc = mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_POLL_ENTRIES_UPDATED);
+ } else {
+ CDBG_ERROR("%s: invalid handler %d (%d)",
+ __func__, handler, idx);
+ }
+
+ return rc;
+}
+
+int32_t mm_camera_poll_thread_launch(mm_camera_poll_thread_t * poll_cb,
+ mm_camera_poll_thread_type_t poll_type)
+{
+ int32_t rc = 0;
+ poll_cb->poll_type = poll_type;
+
+ poll_cb->pfds[0] = 0;
+ poll_cb->pfds[1] = 0;
+ rc = pipe(poll_cb->pfds);
+ if(rc < 0) {
+ CDBG_ERROR("%s: pipe open rc=%d\n", __func__, rc);
+ return -1;
+ }
+
+ poll_cb->timeoutms = -1; /* -1 = block indefinitely (no poll timeout) */
+
+ CDBG("%s: poll_type = %d, read fd = %d, write fd = %d timeout = %d",
+ __func__, poll_cb->poll_type,
+ poll_cb->pfds[0], poll_cb->pfds[1],poll_cb->timeoutms);
+
+ pthread_mutex_init(&poll_cb->mutex, NULL);
+ pthread_cond_init(&poll_cb->cond_v, NULL);
+
+ /* launch the thread */
+ pthread_mutex_lock(&poll_cb->mutex);
+ poll_cb->status = 0;
+ pthread_create(&poll_cb->pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
+ if(!poll_cb->status) {
+ pthread_cond_wait(&poll_cb->cond_v, &poll_cb->mutex);
+ }
+ pthread_mutex_unlock(&poll_cb->mutex);
+ CDBG("%s: End",__func__);
+ return rc;
+}
+
+int32_t mm_camera_poll_thread_release(mm_camera_poll_thread_t *poll_cb)
+{
+ int32_t rc = 0;
+ if(MM_CAMERA_POLL_TASK_STATE_STOPPED == poll_cb->state) {
+ CDBG_ERROR("%s: err, poll thread is not running.\n", __func__);
+ return rc;
+ }
+
+ /* send exit signal to poll thread */
+ mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
+ /* wait until poll thread exits */
+ if (pthread_join(poll_cb->pid, NULL) != 0) {
+ CDBG_ERROR("%s: pthread dead already\n", __func__);
+ }
+
+ /* close pipe */
+ if(poll_cb->pfds[0]) {
+ close(poll_cb->pfds[0]);
+ }
+ if(poll_cb->pfds[1]) {
+ close(poll_cb->pfds[1]);
+ }
+
+ pthread_mutex_destroy(&poll_cb->mutex);
+ pthread_cond_destroy(&poll_cb->cond_v);
+ memset(poll_cb, 0, sizeof(mm_camera_poll_thread_t));
+ return rc;
+}
diff --git a/camera/QCamera/stack/mm-camera-test/Android.mk b/camera/QCamera/stack/mm-camera-test/Android.mk
new file mode 100644
index 0000000..543fbf8
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/Android.mk
@@ -0,0 +1,61 @@
+LOCAL_PATH:=$(call my-dir)
+USE_BIONIC_HEADER:=true
+include $(CLEAR_VARS)
+
+ifeq ($(call is-board-platform,msm8960),true)
+LOCAL_CFLAGS:= \
+ -DAMSS_VERSION=$(AMSS_VERSION) \
+ $(mmcamera_debug_defines) \
+ $(mmcamera_debug_cflags) \
+ $(USE_SERVER_TREE) \
+
+ifneq ($(strip $(USE_BIONIC_HEADER)),true)
+LOCAL_CFLAGS += -include $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include/linux/ion.h
+endif
+
+ifeq ($(strip $(TARGET_USES_ION)),true)
+LOCAL_CFLAGS += -DUSE_ION
+endif
+
+LOCAL_CFLAGS += -D_ANDROID_
+
+LOCAL_SRC_FILES:= \
+ src/mm_qcamera_main_menu.c \
+ src/mm_qcamera_display.c \
+ src/mm_qcamera_app.c \
+ src/mm_qcamera_snapshot.c \
+ src/mm_qcamera_video.c \
+ src/mm_qcamera_preview.c \
+ src/mm_qcamera_rdi.c \
+ src/mm_qcamera_unit_test.c \
+ src/mm_qcamera_dual_test.c
+
+LOCAL_C_INCLUDES:=$(LOCAL_PATH)/inc
+LOCAL_C_INCLUDES+= \
+ $(TARGET_OUT_INTERMEDIATES)/include/mm-camera-interface_badger \
+ $(LOCAL_PATH)/../mm-camera-interface/inc \
+ $(LOCAL_PATH)/../common \
+ $(LOCAL_PATH)/../../../ \
+ $(LOCAL_PATH)/../../../inc
+
+ifneq ($(strip $(USE_BIONIC_HEADER)),true)
+LOCAL_C_INCLUDES+= $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include/media
+LOCAL_C_INCLUDES+= $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+endif
+
+ifeq ($(call is-board-platform,msm8960),true)
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_CP_MM_HEAP_ID
+else
+LOCAL_CFLAGS += -DCAMERA_ION_HEAP_ID=ION_CAMERA_HEAP_ID
+endif
+
+LOCAL_SHARED_LIBRARIES:= \
+ libcutils libdl
+
+LOCAL_MODULE:= mm-qcamera-app-badger
+
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_EXECUTABLE)
+endif
diff --git a/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_app.h b/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_app.h
new file mode 100644
index 0000000..b32039c
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_app.h
@@ -0,0 +1,211 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_QCAMERA_APP_H__
+#define __MM_QCAMERA_APP_H__
+
+#define DISABLE_JPEG_ENCODING
+
+#include "mm_qcamera_main_menu.h"
+#include "mm_camera_interface.h"
+#ifndef DISABLE_JPEG_ENCODING
+#include "mm_omx_jpeg_encoder.h"
+#endif
+
+#define MM_APP_MAX_DUMP_FRAME_NUM 1000
+
+#define PREVIEW_BUF_NUM 7
+#define VIDEO_BUF_NUM 7
+
+typedef enum {
+ MM_CAMERA_OK,
+ MM_CAMERA_E_GENERAL,
+ MM_CAMERA_E_NO_MEMORY,
+ MM_CAMERA_E_NOT_SUPPORTED,
+ MM_CAMERA_E_INVALID_INPUT,
+ MM_CAMERA_E_INVALID_OPERATION, /* 5 */
+ MM_CAMERA_E_ENCODE,
+ MM_CAMERA_E_BUFFER_REG,
+ MM_CAMERA_E_PMEM_ALLOC,
+ MM_CAMERA_E_CAPTURE_FAILED,
+ MM_CAMERA_E_CAPTURE_TIMEOUT, /* 10 */
+}mm_camera_status_type_t;
+
+typedef struct {
+ int num;
+ uint32_t frame_len;
+ struct msm_frame frame[MM_CAMERA_MAX_NUM_FRAMES];
+
+ uint32_t fd[MM_CAMERA_MAX_NUM_FRAMES];
+ int main_ion_fd[MM_CAMERA_MAX_NUM_FRAMES];
+ struct ion_allocation_data alloc[MM_CAMERA_MAX_NUM_FRAMES];
+ struct ion_fd_data ion_info_fd[MM_CAMERA_MAX_NUM_FRAMES];
+ int reg[MM_CAMERA_MAX_NUM_FRAMES];
+} mm_camear_app_buf_t;
+
+typedef struct {
+ int id;
+ mm_camera_stream_config_t str_config;
+}mm_camear_stream_t;
+
+typedef enum{
+ CAMERA_STATE_OPEN,
+ CAMERA_STATE_PREVIEW,
+ CAMERA_STATE_RECORD,
+ CAMERA_STATE_SNAPSHOT,
+ CAMERA_STATE_RDI
+}camera_state;
+
+typedef enum{
+ CAMERA_MODE,
+ RECORDER_MODE,
+ ZSL_MODE,
+ RDI_MODE
+}camera_mode;
+
+typedef struct {
+ mm_camera_vtbl_t *cam;
+ mm_camear_mem_vtbl_t *mem_cam;
+ int8_t my_id;
+ //mm_camera_op_mode_type_t op_mode;
+ uint32_t ch_id;
+ cam_ctrl_dimension_t dim;
+ int open_flag;
+ int ionfd;
+ mm_camear_stream_t stream[8];
+ camera_mode cam_mode;
+ camera_state cam_state;
+ int fullSizeSnapshot;
+} mm_camera_app_obj_t;
+
+typedef struct {
+ void *ptr;
+ mm_camera_info_t *(*mm_camera_query) (uint8_t *num_cameras);
+ mm_camera_vtbl_t *(*mm_camera_open) (uint8_t camera_idx,
+ mm_camear_mem_vtbl_t *mem_vtbl);
+
+ /*uint8_t *(*mm_camera_do_mmap)(uint32_t size, int *pmemFd);
+ int (*mm_camera_do_munmap)(int pmem_fd, void *addr, size_t size);
+
+ uint8_t *(*mm_camera_do_mmap_ion)(int ion_fd, struct ion_allocation_data *alloc,
+ struct ion_fd_data *ion_info_fd, int *mapFd);
+ int (*mm_camera_do_munmap_ion) (int ion_fd, struct ion_fd_data *ion_info_fd,
+ void *addr, size_t size);*/
+#if 0
+ uint32_t (*mm_camera_get_msm_frame_len)(cam_format_t fmt_type,
+ camera_mode_t mode,
+ int width,
+ int height,
+ int image_type,
+ uint8_t *num_planes,
+ uint32_t planes[]);
+#ifndef DISABLE_JPEG_ENCODING
+ void (*set_callbacks)(jpegfragment_callback_t fragcallback,
+ jpeg_callback_t eventcallback, void* userdata, void* output_buffer,
+ int * outBufferSize);
+ int8_t (*omxJpegOpen)(void);
+ void (*omxJpegClose)(void);
+ int8_t (*omxJpegStart)(void);
+ int8_t (*omxJpegEncode)(omx_jpeg_encode_params *encode_params);
+ void (*omxJpegFinish)(void);
+ int8_t (*mm_jpeg_encoder_setMainImageQuality)(uint32_t quality);
+#endif
+#endif
+} hal_interface_lib_t;
+
+typedef struct {
+ //mm_camera_vtbl_t *cam;
+ uint8_t num_cameras;
+ mm_camera_app_obj_t *obj[2];
+ mm_camera_info_t *cam_info;
+ int use_overlay;
+ int use_user_ptr;
+ hal_interface_lib_t hal_lib;
+ int cam_open;
+ int run_sanity;
+} mm_camera_app_t;
+
+extern mm_camera_app_t my_cam_app;
+extern USER_INPUT_DISPLAY_T input_display;
+extern int mm_app_dl_render(int frame_fd, struct crop_info * cropinfo);
+extern mm_camera_app_obj_t *mm_app_get_cam_obj(int8_t cam_id);
+extern int mm_app_load_hal();
+extern int mm_app_init();
+extern void mm_app_user_ptr(int use_user_ptr);
+extern int mm_app_open_ch(int cam_id);
+extern void mm_app_close_ch(int cam_id, int ch_type);
+extern int mm_app_set_dim(int8_t cam_id, cam_ctrl_dimension_t *dim);
+extern int mm_app_run_unit_test();
+extern int mm_app_unit_test_entry(mm_camera_app_t *cam_app);
+extern int mm_app_unit_test();
+extern void mm_app_set_dim_def(cam_ctrl_dimension_t *dim);
+extern int mm_app_open(uint8_t camera_idx);
+extern int mm_app_close(int8_t cam_id);
+extern int startPreview(int cam_id);
+extern int mm_app_stop_preview(int cam_id);
+extern int mm_app_stop_video(int cam_id);
+extern int startRecording(int cam_id);
+extern int stopRecording(int cam_id);
+extern int mm_app_take_picture(int cam_id);
+extern int mm_app_take_raw_picture(int cam_id);
+extern int mm_app_get_dim(int8_t cam_id, cam_ctrl_dimension_t *dim);
+extern int mm_app_streamon_preview(int cam_id);
+extern int mm_app_set_snapshot_fmt(int cam_id,mm_camera_image_fmt_t *fmt);
+
+extern int mm_app_dual_test_entry(mm_camera_app_t *cam_app);
+extern int mm_app_dual_test();
+
+extern int mm_camera_app_wait();
+extern void mm_camera_app_done();
+
+extern int mm_stream_initbuf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data,
+ mm_camera_frame_len_offset *frame_offset_info,
+ uint8_t num_bufs,
+ uint8_t *initial_reg_flag,
+ mm_camera_buf_def_t *bufs);
+
+extern int mm_stream_deinitbuf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data, uint8_t num_bufs,
+ mm_camera_buf_def_t *bufs);
+
+extern void mm_app_preview_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data);
+#endif /* __MM_QCAMERA_APP_H__ */
+
+
+
+
+
+
+
+
+
diff --git a/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_display_dimensions.h b/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_display_dimensions.h
new file mode 100644
index 0000000..7ff486b
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_display_dimensions.h
@@ -0,0 +1,43 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+
+#ifndef __MM_QCAMERA_DISPLAY_DIMENSIONS__
+#define __MM_QCAMERA_DISPLAY_DIMENSIONS__
+
+#define PREVIEW_WIDTH 800
+#define PREVIEW_HEIGHT 480
+#define DISPLAY_WIDTH 800
+#define DISPLAY_HEIGHT 480
+#define THUMBNAIL_WIDTH 192
+#define THUMBNAIL_HEIGHT 144
+#define PICTURE_WIDTH 1280
+#define PICTURE_HEIGHT 960
+
+#endif /* __MM_QCAMERA_DISPLAY_DIMENSIONS__ */
diff --git a/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_main_menu.h b/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_main_menu.h
new file mode 100644
index 0000000..f4d233b
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_main_menu.h
@@ -0,0 +1,464 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_QCAMERA_MAIN_MENU_H__
+#define __MM_QCAMERA_MAIN_MENU_H__
+
+#include "mm_camera_interface.h"
+
+
+#define VIDEO_BUFFER_SIZE     (PREVIEW_WIDTH * PREVIEW_HEIGHT * 3/2) /* 3/2 => 1.5 bytes/pixel, presumably YUV420 -- confirm */
+#define THUMBNAIL_BUFFER_SIZE (THUMBNAIL_WIDTH * THUMBNAIL_HEIGHT * 3/2) /* NOTE(review): *_WIDTH/*_HEIGHT come from mm_qcamera_display_dimensions.h, which this header does not include -- includers must provide it */
+#define SNAPSHOT_BUFFER_SIZE  (PICTURE_WIDTH * PICTURE_HEIGHT * 3/2)
+
+/*===========================================================================
+ * Macro
+ *===========================================================================*/
+#define PREVIEW_FRAMES_NUM 4
+#define VIDEO_FRAMES_NUM 4
+#define THUMBNAIL_FRAMES_NUM 1
+#define SNAPSHOT_FRAMES_NUM 1
+#define MAX_NUM_FORMAT 32
+
+typedef enum
+{
+ STOP_CAMERA = 1,
+ PREVIEW_VIDEO_RESOLUTION = 2,
+ TAKE_YUV_SNAPSHOT = 3,
+ TAKE_RAW_SNAPSHOT = 4,
+ TAKE_ZSL_SNAPSHOT = 5,
+ //TAKE_LIVE_SNAPSHOT = 17,
+ START_RECORDING = 6,
+ START_RDI = 7,
+ STOP_RDI = 8,
+ SWITCH_CAMERA = 9,
+ //STOP_RECORDING = 7,
+ //SET_WHITE_BALANCE = 3,
+ //SET_EXP_METERING = 4,
+ //GET_CTRL_VALUE = 5,
+ //TOGGLE_AFR = 6,
+ //SET_ISO = 7,
+ //BRIGHTNESS_GOTO_SUBMENU = 8,
+ //CONTRAST_GOTO_SUBMENU = 9,
+ //EV_GOTO_SUBMENU = 10,
+ //SATURATION_GOTO_SUBMENU = 11,
+ //SET_ZOOM = 12,
+ //SET_SHARPNESS = 13,
+} Camera_main_menu_t;
+
+typedef enum
+{
+ ACTION_NO_ACTION,
+ ACTION_STOP_CAMERA,
+ ACTION_PREVIEW_VIDEO_RESOLUTION,
+ ACTION_TAKE_YUV_SNAPSHOT,
+ ACTION_TAKE_RAW_SNAPSHOT,
+ ACTION_TAKE_ZSL_SNAPSHOT,
+ ACTION_TAKE_LIVE_SNAPSHOT,
+ ACTION_START_RECORDING,
+ ACTION_STOP_RECORDING,
+ ACTION_START_RDI,
+ ACTION_STOP_RDI,
+ ACTION_SWITCH_CAMERA,
+ ACTION_SET_WHITE_BALANCE,
+ ACTION_SET_EXP_METERING,
+ ACTION_GET_CTRL_VALUE,
+ ACTION_TOGGLE_AFR,
+ ACTION_SET_ISO,
+ ACTION_BRIGHTNESS_INCREASE,
+ ACTION_BRIGHTNESS_DECREASE,
+ ACTION_CONTRAST_INCREASE,
+ ACTION_CONTRAST_DECREASE,
+ ACTION_EV_INCREASE,
+ ACTION_EV_DECREASE,
+ ACTION_SATURATION_INCREASE,
+ ACTION_SATURATION_DECREASE,
+ ACTION_SET_ZOOM,
+ ACTION_SHARPNESS_INCREASE,
+ ACTION_SHARPNESS_DECREASE,
+} camera_action_t;
+
+#define INVALID_KEY_PRESS 0
+#define BASE_OFFSET ('Z' - 'A' + 1)
+#define BASE_OFFSET_NUM ('Z' - 'A' + 2)
+#define PAD_TO_WORD(a) (((a)+3)&~3)
+
+
+#define SQCIF_WIDTH 128
+#define SQCIF_HEIGHT 96
+#define QCIF_WIDTH 176
+#define QCIF_HEIGHT 144
+#define QVGA_WIDTH 320
+#define QVGA_HEIGHT 240
+#define HD_THUMBNAIL_WIDTH 256
+#define HD_THUMBNAIL_HEIGHT 144
+#define CIF_WIDTH 352
+#define CIF_HEIGHT 288
+#define VGA_WIDTH 640
+#define VGA_HEIGHT 480
+#define WVGA_WIDTH 800
+#define WVGA_HEIGHT 480
+
+#define MP1_WIDTH 1280
+#define MP1_HEIGHT 960
+#define MP2_WIDTH 1600
+#define MP2_HEIGHT 1200
+#define MP3_WIDTH 2048
+#define MP3_HEIGHT 1536
+#define MP5_WIDTH 2592
+#define MP5_HEIGHT 1944
+
+#define SVGA_WIDTH 800
+#define SVGA_HEIGHT 600
+#define XGA_WIDTH 1024
+#define XGA_HEIGHT 768
+#define HD720_WIDTH 1280
+#define HD720_HEIGHT 720
+#define WXGA_WIDTH 1280
+#define WXGA_HEIGHT 768
+#define HD1080_WIDTH 1920
+#define HD1080_HEIGHT 1080
+
+typedef enum
+{
+ RESOLUTION_MIN = 1,
+ SQCIF = RESOLUTION_MIN,
+ QCIF = 2,
+ QVGA = 3,
+ CIF = 4,
+ VGA = 5,
+ WVGA = 6,
+ SVGA = 7,
+ XGA = 8,
+ HD720 = 9,
+ RESOLUTION_PREVIEW_VIDEO_MAX = HD720,
+ WXGA = 10,
+ MP1 = 11,
+ MP2 = 12,
+ HD1080 = 13,
+ MP3 = 14,
+ MP5 = 15,
+ RESOLUTION_MAX = MP5,
+} Camera_Resolution;
+
+
+typedef enum {
+ WHITE_BALANCE_STATE = 1,
+ WHITE_BALANCE_TEMPERATURE = 2,
+ BRIGHTNESS_CTRL = 3,
+ EV = 4,
+ CONTRAST_CTRL = 5,
+ SATURATION_CTRL = 6,
+ SHARPNESS_CTRL = 7,
+} Get_Ctrl_modes;
+
+typedef enum {
+ WHITE_BALANCE_AUTO = 1,
+ WHITE_BALANCE_OFF = 2,
+ WHITE_BALANCE_DAYLIGHT = 3,
+ WHITE_BALANCE_INCANDESCENT = 4,
+ WHITE_BALANCE_FLUORESCENT = 5,
+} White_Balance_modes;
+
+typedef enum {
+ EXP_METERING_FRAME_AVERAGE = 1,
+ EXP_METERING_CENTER_WEIGHTED = 2,
+ EXP_METERING_SPOT_METERING = 3,
+} Exp_Metering_modes;
+
+typedef enum {
+ ISO_AUTO = 1,
+ ISO_DEBLUR = 2,
+ ISO_100 = 3,
+ ISO_200 = 4,
+ ISO_400 = 5,
+ ISO_800 = 6,
+ ISO_1600 = 7,
+} ISO_modes;
+
+typedef enum
+{
+ MENU_ID_MAIN,
+ MENU_ID_PREVIEWVIDEORESOLUTIONCHANGE,
+ MENU_ID_WHITEBALANCECHANGE,
+ MENU_ID_EXPMETERINGCHANGE,
+ MENU_ID_GET_CTRL_VALUE,
+ MENU_ID_TOGGLEAFR,
+ MENU_ID_ISOCHANGE,
+ MENU_ID_BRIGHTNESSCHANGE,
+ MENU_ID_CONTRASTCHANGE,
+ MENU_ID_EVCHANGE,
+ MENU_ID_SATURATIONCHANGE,
+ MENU_ID_ZOOMCHANGE,
+ MENU_ID_SHARPNESSCHANGE,
+ MENU_ID_SWITCHCAMERA,
+ MENU_ID_RECORD,
+ MENU_ID_INVALID,
+} menu_id_change_t;
+
+typedef enum
+{
+ INCREASE_ZOOM = 1,
+ DECREASE_ZOOM = 2,
+ INCREASE_STEP_ZOOM = 3,
+ DECREASE_STEP_ZOOM = 4,
+} Camera_Zoom;
+
+typedef enum
+{
+ INC_CONTRAST = 1,
+ DEC_CONTRAST = 2,
+} Camera_Contrast_changes;
+
+typedef enum
+{
+ INC_BRIGHTNESS = 1,
+ DEC_BRIGHTNESS = 2,
+} Camera_Brightness_changes;
+
+typedef enum
+{
+ INCREASE_EV = 1,
+ DECREASE_EV = 2,
+} Camera_EV_changes;
+
+typedef enum {
+ INC_SATURATION = 1,
+ DEC_SATURATION = 2,
+} Camera_Saturation_changes;
+
+typedef enum
+{
+ INC_ISO = 1,
+ DEC_ISO = 2,
+} Camera_ISO_changes;
+
+typedef enum
+{
+ INC_SHARPNESS = 1,
+ DEC_SHARPNESS = 2,
+} Camera_Sharpness_changes;
+
+typedef enum {
+ ZOOM_IN = 1,
+ ZOOM_OUT = 2,
+} Zoom_direction;
+
+typedef enum
+{
+ LIVE_SNAPSHOT_MENU = 1,
+ STOP_RECORDING_MENU = 2,
+} Record_changes;
+
+typedef struct{
+ Camera_main_menu_t main_menu;
+ char * menu_name;
+} CAMERA_MAIN_MENU_TBL_T;
+
+typedef struct{
+ Camera_Resolution cs_id;
+ uint16_t width;
+ uint16_t height;
+ char * name;
+ char * str_name;
+} PREVIEW_DIMENSION_TBL_T;
+
+typedef struct {
+ White_Balance_modes wb_id;
+ char * wb_name;
+} WHITE_BALANCE_TBL_T;
+
+typedef struct {
+ int cam_id;
+ char * cam_name;
+} CAMERA_TBL_T;
+
+typedef struct {
+ int act_id;
+ char * act_name;
+} RECORD_TBL_T;
+
+typedef struct {
+ Get_Ctrl_modes get_ctrl_id;
+ char * get_ctrl_name;
+} GET_CTRL_TBL_T;
+
+typedef struct{
+ Exp_Metering_modes exp_metering_id;
+ char * exp_metering_name;
+} EXP_METERING_TBL_T;
+
+typedef struct {
+ ISO_modes iso_modes;
+ char *iso_modes_name;
+} ISO_TBL_T;
+
+typedef struct {
+ Zoom_direction zoom_direction;
+ char * zoom_direction_name;
+} ZOOM_TBL_T;
+
+typedef struct {
+ Camera_Sharpness_changes sharpness_change;
+ char *sharpness_change_name;
+} SHARPNESS_TBL_T;
+
+typedef struct {
+ Camera_Brightness_changes bc_id;
+ char * brightness_name;
+} CAMERA_BRIGHTNESS_TBL_T;
+
+typedef struct {
+ Camera_Contrast_changes cc_id;
+ char * contrast_name;
+} CAMERA_CONTRST_TBL_T;
+
+typedef struct {
+ Camera_EV_changes ec_id;
+ char * EV_name;
+} CAMERA_EV_TBL_T;
+
+typedef struct {
+ Camera_Saturation_changes sc_id;
+ char * saturation_name;
+} CAMERA_SATURATION_TBL_T;
+
+typedef struct {
+ Camera_Sharpness_changes bc_id;
+ char * sharpness_name;
+} CAMERA_SHARPNESS_TBL_T;
+
+
+typedef struct {
+ void *frameThread;
+ int8_t (*setDimension)(int , void *);
+ int8_t (*setDefaultParams)(int );
+ int8_t (*registerPreviewBuf)(int , void *, uint32_t, struct msm_frame *, int8_t );
+ int8_t (*unregisterPreviewBuf)(int , void *, uint32_t, int , unsigned char *);
+ int8_t (*registerVideoBuf)(int , void *, uint32_t, struct msm_frame *, int8_t );
+ int8_t (*unregisterVideoBuf)(int , void *, uint32_t, int , unsigned char *);
+ int8_t (*startPreview)(int );
+ int8_t (*stopPreview)(int );
+ int8_t (*startVideo)(int );
+ int8_t (*stopVideo)(int );
+ int8_t (*startRecording)(int );
+ int8_t (*stopRecording)(int );
+ int8_t (*startSnapshot)(int );
+ int8_t (*startRawSnapshot)(int );
+
+ int8_t (*registerSnapshotBuf)(int , void *, int , int ,
+ unsigned char *, unsigned char *);
+
+ int8_t (*registerRawSnapshotBuf)(int , void *, int , unsigned char *);
+
+ int8_t (*unregisterSnapshotBuf)(int , void *, int , int ,
+ unsigned char *, unsigned char *);
+
+ int8_t (*unregisterRawSnapshotBuf)(int , void *, int , unsigned char *);
+ int8_t (*getPicture)(int fd, struct crop_info *cropInfo );
+ int8_t (*stopSnapshot)(int );
+ int8_t (*jpegEncode)(const char *path, void *, int, int , unsigned char *,
+ unsigned char *, void *, camera_encoding_rotate_t rotate);
+ int8_t (*setZoom)(int , void *);
+ int8_t (*getMaxZoom)(int fd, void *pZm);
+ int8_t (*setSpecialEffect)(int, int effect);
+ int8_t (*setBrightness)(int, int);
+ int8_t (*setContrast)(int, int);
+ int8_t (*setSaturation)(int, int);
+ int8_t (*setEV)(int , int );
+ int8_t (*setAntiBanding)(int , int32_t antibanding);
+ int8_t (*setWhiteBalance)(int , int32_t );
+ int8_t (*setAecMode)(int , camera_auto_exposure_mode_type );
+ int8_t (*setIso)(int , camera_iso_mode_type );
+ int8_t (*setSharpness)(int , int );
+ int8_t (*setAutoFocus)(int , isp3a_af_mode_t, cam_af_ctrl_t *);
+ int8_t (*sethjr) (int fd, int8_t hjr_status);
+ int8_t (*setLensShading) (int fd, int8_t rolloff_status);
+ int8_t (*setLedMode) (int fd, led_mode_t led_mode);
+ int8_t (*getSharpness_AF) (int fd, int32_t *sharpness);
+ int8_t (*setMotionIso) (int fd, motion_iso_t motion_iso);
+ int8_t (*setHue) (int fd, int32_t hue);
+ int8_t (*cancelAF) (int fd);
+ int8_t (*getAfStep) (int fd, int32_t *afStep);
+ int8_t (*setAfStep) (int fd, int32_t afStep);
+ int8_t (*enableAFD) (int fd);
+ int8_t (*prepareSnapshot) (int fd);
+ int8_t (*setFpsMode) (int fd, fps_mode_t fps_mode);
+ int8_t (*setFps) (int fd, uint16_t fps);
+ int8_t (*setAFFocusRect) (int fd, cam_af_focusrect_t af_focus_rect);
+} interface_ctrl_t;
+
+int8_t native_interface_init(interface_ctrl_t *intrfcCtrl, int *camfd);
+int8_t v4l2_interface_init(interface_ctrl_t *intrfcCtrl, int *videofd);
+
+int set_zoom (int zoom_action_param);
+int set_hjr (void);
+int LensShading (void);
+int decrease_contrast (void);
+int increase_contrast (void);
+int decrease_saturation (void);
+int increase_saturation (void);
+int decrease_brightness (void);
+int increase_brightness (void);
+int decrease_EV (void);
+int increase_EV (void);
+int set_iso (int iso_action_param);
+int decrease_sharpness (void);
+int increase_sharpness (void);
+int SpecialEffect (void);
+int set_exp_metering (int exp_metering_action_param);
+int LED_mode_change (void);
+int set_sharpness_AF (void);
+int set_auto_focus (void);
+int set_antibanding (void);
+int set_whitebalance (int wb_action_param);
+int print_current_menu ();
+int set_MotionIso (void);
+int start_preview (void);
+int stop_preview (void);
+static int start_video (void); /* FIXME: 'static' on a declaration in a shared header is wrong -- every translation unit that includes this header gets "declared static but never defined" diagnostics */
+static int stop_video (void);  /* FIXME: same as above; these should either be non-static here or moved into the .c file */
+int start_recording (void);
+int stop_recording (void);
+int snapshot_resolution (int);
+int preview_video_resolution (int);
+int system_init(void);
+int system_destroy(void);
+int toggle_hue(void);
+int cancel_af(void);
+int get_af_step();
+int set_af_step();
+int enable_afd();
+int prepare_snapshot();
+int set_fps_mode(void);
+int get_ctrl_value (int ctrl_value_mode_param);
+int toggle_afr ();
+int take_yuv_snapshot(int cam_id);
+int take_raw_snapshot();
+#endif /* __MM_QCAMERA_MAIN_MENU_H__ */
diff --git a/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_unit_test.h b/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_unit_test.h
new file mode 100644
index 0000000..19c6388
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/inc/mm_qcamera_unit_test.h
@@ -0,0 +1,45 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_QCAMERA_APP_UNIT_TEST_H__
+#define __MM_QCAMERA_APP_UNIT_TEST_H__
+
+#include "mm_qcamera_main_menu.h"
+#include "mm_camera_interface.h"
+#include "mm_qcamera_app.h"
+
+typedef int (*mm_app_test_t) (mm_camera_app_t *cam_apps); /* signature of one unit-test entry point */
+typedef struct {
+    mm_app_test_t f; /* test function to invoke */
+    int r;           /* result slot -- presumably the test's return code; confirm against the test runner */
+} mm_app_tc_t;
+
+#endif
+
+/* __MM_QCAMERA_APP_UNIT_TEST_H__ */
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_app.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_app.c
new file mode 100644
index 0000000..9a7c971
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_app.c
@@ -0,0 +1,406 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <stdbool.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <dlfcn.h>
+#include "mm_qcamera_main_menu.h"
+#include "mm_qcamera_app.h"
+
+
+mm_camera_app_t my_cam_app;
+
+static pthread_mutex_t app_mutex;
+static int thread_status = 0;
+static pthread_cond_t app_cond_v;
+
+#define MM_QCAMERA_APP_WAIT_TIME 1000000000
+
+int mm_camera_app_timedwait() /* wait for mm_camera_app_done(), with ~1s timeout; 0 on wakeup, ETIMEDOUT on timeout */
+{
+    int rc = 0;
+    pthread_mutex_lock(&app_mutex);
+    if(false == thread_status) {
+        struct timespec tw;
+        memset(&tw, 0, sizeof tw);
+        tw.tv_sec = time(0) + MM_QCAMERA_APP_WAIT_TIME / 1000000000; /* pthread_cond_timedwait takes an ABSOLUTE deadline: now + 1s */
+        tw.tv_nsec = 0; /* was 'tv_nsec = time(0) + 1e9' with tv_sec = 0: tv_nsec must be < 1e9, so the old value was EINVAL/garbage */
+
+        //pthread_cond_wait(&app_cond_v, &app_mutex);
+        rc = pthread_cond_timedwait(&app_cond_v, &app_mutex,&tw);
+        thread_status = false; /* consume the event (or the timeout) */
+    }
+    pthread_mutex_unlock(&app_mutex);
+    return rc;
+}
+
+int mm_camera_app_wait() /* block until mm_camera_app_done() is called (typically from a frame callback) */
+{
+    int rc = 0;
+    pthread_mutex_lock(&app_mutex);
+    if(false == thread_status){ /* if done() already fired, skip waiting -- avoids the lost-wakeup race */
+        pthread_cond_wait(&app_cond_v, &app_mutex);
+        thread_status = false; /* consume the event so the next wait blocks again */
+    }
+    pthread_mutex_unlock(&app_mutex);
+    return rc; /* always 0 */
+}
+
+void mm_camera_app_done() /* wake any thread parked in mm_camera_app_wait()/mm_camera_app_timedwait() */
+{
+    pthread_mutex_lock(&app_mutex);
+    thread_status = true; /* latch the event in case no one is waiting yet */
+    pthread_cond_signal(&app_cond_v);
+    pthread_mutex_unlock(&app_mutex);
+}
+
+
+mm_camera_app_obj_t *mm_app_get_cam_obj(int8_t cam_id) /* per-camera app state; NULL if that camera was never opened */
+{
+    mm_camera_app_obj_t *temp = my_cam_app.obj[cam_id]; /* NOTE(review): cam_id is not range-checked against the obj[] bounds */
+    return temp;
+}
+
+void mm_app_user_ptr(int use_user_ptr) /* select user-pointer vs driver-allocated buffers -- presumably V4L2 USERPTR; confirm */
+{
+    my_cam_app.use_user_ptr = use_user_ptr;
+}
+
+void mm_app_set_dim_def(cam_ctrl_dimension_t *dim) /* fill *dim with test-app defaults: WVGA preview/video, MP1 stills, QVGA thumbnail, NV21/NV12 formats */
+{
+    dim->display_width = WVGA_WIDTH;
+    dim->display_height = WVGA_HEIGHT;
+    input_display.user_input_display_width = dim->display_width; /* NOTE(review): input_display is a global declared elsewhere in this file -- confirm */
+    input_display.user_input_display_height = dim->display_height;
+    dim->video_width = WVGA_WIDTH;
+    dim->video_width = CEILING32(dim->video_width); /* video width rounded up -- CEILING32 presumably pads to a multiple of 32 */
+    dim->video_height = WVGA_HEIGHT;
+    dim->orig_video_width = dim->video_width;
+    dim->orig_video_height = dim->video_height;
+    dim->picture_width = MP1_WIDTH;
+    dim->picture_height = MP1_HEIGHT;
+    dim->orig_picture_dx = dim->picture_width;
+    dim->orig_picture_dy = dim->picture_height;
+    dim->ui_thumbnail_height = QVGA_HEIGHT;
+    dim->ui_thumbnail_width = QVGA_WIDTH;
+    dim->thumbnail_height = dim->ui_thumbnail_height;
+    dim->thumbnail_width = dim->ui_thumbnail_width;
+    dim->orig_picture_width = dim->picture_width;
+    dim->orig_picture_height = dim->picture_height;
+    dim->orig_thumb_width = dim->thumbnail_width;
+    dim->orig_thumb_height = dim->thumbnail_height;
+    dim->raw_picture_height = MP1_HEIGHT;
+    dim->raw_picture_width = MP1_WIDTH;
+    dim->hjr_xtra_buff_for_bayer_filtering; /* FIXME: statement has no effect -- probably meant '= 0' (or was left half-written); confirm intent */
+    dim->prev_format = CAMERA_YUV_420_NV21;
+    dim->enc_format = CAMERA_YUV_420_NV12;
+    dim->thumb_format = CAMERA_YUV_420_NV21;
+    dim->main_img_format = CAMERA_YUV_420_NV21;
+    dim->prev_padding_format = CAMERA_PAD_TO_4K;
+    dim->display_luma_width = dim->display_width; /* per-plane dimensions default to the full frame size */
+    dim->display_luma_height = dim->display_height;
+    dim->display_chroma_width = dim->display_width;
+    dim->display_chroma_height = dim->display_height;
+    dim->video_luma_width = dim->orig_video_width;
+    dim->video_luma_height = dim->orig_video_height;
+    dim->video_chroma_width = dim->orig_video_width;
+    dim->video_chroma_height = dim->orig_video_height;
+    dim->thumbnail_luma_width = dim->thumbnail_width;
+    dim->thumbnail_luma_height = dim->thumbnail_height;
+    dim->thumbnail_chroma_width = dim->thumbnail_width;
+    dim->thumbnail_chroma_height = dim->thumbnail_height;
+    dim->main_img_luma_width = dim->picture_width;
+    dim->main_img_luma_height = dim->picture_height;
+    dim->main_img_chroma_width = dim->picture_width;
+    dim->main_img_chroma_height = dim->picture_height;
+}
+
+int mm_app_load_hal() /* dlopen the platform camera HAL and resolve the query/open entry points; 0 on success, -1 on dlopen failure */
+{
+    memset(&my_cam_app, 0, sizeof(my_cam_app));
+    memset(&my_cam_app.hal_lib, 0, sizeof(hal_interface_lib_t)); /* redundant: the full-struct memset above already covers hal_lib */
+#if defined(_MSM7630_)
+    my_cam_app.hal_lib.ptr = dlopen("/usr/lib/hw/camera.msm7630.so", RTLD_LAZY);
+#elif defined(_MSM7627A_)
+    my_cam_app.hal_lib.ptr = dlopen("/usr/lib/hw/camera.msm7627A.so", RTLD_LAZY);
+#else
+    //my_cam_app.hal_lib.ptr = dlopen("hw/camera.msm8960.so", RTLD_NOW);
+    my_cam_app.hal_lib.ptr = dlopen("libmmcamera_interface_badger.so", RTLD_NOW);
+#endif
+    if (!my_cam_app.hal_lib.ptr) {
+        CDBG_ERROR("%s Error opening HAL library %s\n", __func__, dlerror());
+        return -1;
+    }
+    *(void **)&(my_cam_app.hal_lib.mm_camera_query) = /* write through void** to sidestep object/function-pointer cast warnings */
+        dlsym(my_cam_app.hal_lib.ptr,
+              "camera_query");
+    *(void **)&(my_cam_app.hal_lib.mm_camera_open) = /* NOTE(review): dlsym results are never NULL-checked; mm_app_init() would crash on a bad symbol */
+        dlsym(my_cam_app.hal_lib.ptr,
+              "camera_open");
+    return 0;
+}
+
+int mm_app_init() /* query the HAL for the camera list; MM_CAMERA_OK on success, -1 on failure */
+{
+    int rc = MM_CAMERA_OK;
+    CDBG("%s:BEGIN\n", __func__);
+    my_cam_app.cam_info = (mm_camera_info_t *)my_cam_app.hal_lib.mm_camera_query(&my_cam_app.num_cameras); /* requires a prior successful mm_app_load_hal() */
+    if(my_cam_app.cam_info == NULL) {
+        CDBG_ERROR("%s: Failed to query camera\n", __func__);
+        rc = -1;
+    }
+    CDBG("%s:END, num_cameras = %d\n", __func__, my_cam_app.num_cameras);
+    return rc;
+}
+
+static void notify_evt_cb(uint32_t camera_handle, /* HAL event callback registered for every event type in mm_app_open(); currently log-only */
+                          mm_camera_event_t *evt,void *user_data)
+{
+    CDBG("%s:E evt = %d",__func__,evt->event_type);
+
+    switch(evt->event_type)
+    {
+    case MM_CAMERA_EVT_TYPE_CH:
+        break;
+    case MM_CAMERA_EVT_TYPE_CTRL:
+        break;
+    case MM_CAMERA_EVT_TYPE_STATS:
+        break;
+    case MM_CAMERA_EVT_TYPE_INFO:
+        break;
+    default:
+        break; /* every event type is intentionally ignored for now */
+    }
+    CDBG("%s:X",__func__);
+}
+
+int mm_app_open(uint8_t cam_id) /* open camera cam_id, register event callbacks, acquire a channel; MM_CAMERA_OK or -1 */
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_app_obj_t *pme = NULL;
+    int i;
+    mm_camera_event_type_t evt;
+
+    pme = mm_app_get_cam_obj(cam_id);
+
+    CDBG("%s:BEGIN\n", __func__);
+    if(pme != NULL) {
+        CDBG("%s:cam already open.nop\n",__func__);
+        goto end;
+    }
+    my_cam_app.cam_open = cam_id;
+    my_cam_app.obj[cam_id] = (mm_camera_app_obj_t *)malloc(sizeof(mm_camera_app_obj_t)); /* NOTE(review): malloc results unchecked */
+    pme = my_cam_app.obj[cam_id];
+
+    pme->mem_cam = (mm_camear_mem_vtbl_t *)malloc(sizeof(mm_camear_mem_vtbl_t));
+    memset(pme->mem_cam,0,sizeof(mm_camear_mem_vtbl_t));
+    pme->mem_cam->user_data = pme;
+
+    pme->cam = my_cam_app.hal_lib.mm_camera_open(cam_id,pme->mem_cam);
+    if(pme->cam == NULL) {
+        CDBG("%s:dev open error=%d\n", __func__, rc);
+        memset(pme, 0, sizeof(*pme)); /* was sizeof(pme): that cleared only pointer-size bytes, leaving the rest of the object stale */
+        return -1; /* FIXME: leaks pme and pme->mem_cam, and obj[cam_id] stays non-NULL so a retry thinks the cam is open */
+    }
+    CDBG("Open Camera id = %d handle = %d",cam_id,pme->cam->camera_handle);
+
+    pme->ch_id = cam_id; /* placeholder; overwritten by ch_acquire() below */
+    pme->open_flag = true;
+    mm_app_set_dim_def(&pme->dim);
+
+    for (i = 0; i < MM_CAMERA_EVT_TYPE_MAX; i++) {
+        evt = (mm_camera_event_type_t) i;
+        pme->cam->ops->register_event_notify(pme->cam->camera_handle,notify_evt_cb,pme,evt);
+    }
+    pme->cam_state = CAMERA_STATE_OPEN;
+    pme->cam_mode = CAMERA_MODE;
+    pme->fullSizeSnapshot = 0;
+
+    pme->ch_id = pme->cam->ops->ch_acquire(pme->cam->camera_handle);
+    CDBG("Channel Acquired Successfully %d",pme->ch_id);
+end:
+    CDBG("%s:END, rc=%d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_close(int8_t cam_id) /* release the channel, close the camera, free per-camera state; always MM_CAMERA_OK */
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+    CDBG("%s:BEGIN\n", __func__);
+    if(!pme || !pme->cam) { /* guard pme itself too: obj[cam_id] is NULL when never opened / already closed */
+        CDBG("%s:cam already closed. nop\n",__func__);
+        goto end;
+    }
+
+    pme->cam->ops->ch_release(pme->cam->camera_handle,pme->ch_id);
+    pme->cam->ops->camera_close(pme->cam->camera_handle);
+    pme->open_flag = false;
+    pme->cam = NULL;
+    pme->my_id = 0;
+    free(pme->mem_cam);
+    pme->mem_cam = NULL;
+    memset(&pme->dim, 0, sizeof(pme->dim));
+    /* (removed a second, identical memset of pme->dim) */
+
+    free(pme);
+    pme = NULL;
+    my_cam_app.obj[cam_id] = NULL;
+
+end:
+    CDBG("%s:END, rc=%d\n", __func__, rc);
+    return rc;
+}
+
+void switchRes(int cam_id) /* stop whatever is streaming on cam_id so a new resolution can be applied */
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id); /* NOTE(review): pme not NULL-checked; assumes cam_id is open */
+    switch(pme->cam_state) {
+    case CAMERA_STATE_RECORD:
+        if(MM_CAMERA_OK != (rc = stopRecording(cam_id))){ /* capture rc so the log below reports the real error */
+            CDBG_ERROR("%s:Cannot stop video err=%d\n", __func__, rc);
+            return; /* was 'return -1;' in a void function */
+        } /* fall through: after recording stops, preview must be stopped too */
+    case CAMERA_STATE_PREVIEW:
+        if(MM_CAMERA_OK != (rc = mm_app_stop_preview(cam_id))){
+            CDBG_ERROR("%s: Cannot stop preview, err=%d\n", __func__, rc); /* was "...mode=%d" with NO matching argument (UB) */
+            return;
+        }
+        break;
+    case CAMERA_STATE_SNAPSHOT:
+    default:
+        break;
+    }
+}
+void switchCamera(int cam_id) /* stop the currently open camera, close it, and open cam_id instead */
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(my_cam_app.cam_open); /* was cam_id: the TARGET camera has no obj yet, so that was a NULL deref */
+
+    if(my_cam_app.cam_open == cam_id){
+        return; /* requested camera is already the open one */
+    }
+
+    switch(pme->cam_state) {
+    case CAMERA_STATE_RECORD:
+        if(MM_CAMERA_OK != (rc = stopRecording(my_cam_app.cam_open))){ /* stop the OPEN camera, not the not-yet-opened target */
+            CDBG_ERROR("%s:Cannot stop video err=%d\n", __func__, rc);
+            return; /* was 'return -1;' in a void function */
+        } /* fall through: recording implies preview is also running */
+    case CAMERA_STATE_PREVIEW:
+        if(MM_CAMERA_OK != (rc = mm_app_stop_preview(my_cam_app.cam_open))){
+            CDBG_ERROR("%s: Cannot stop preview, err=%d\n", __func__, rc); /* was "...mode=%d" with NO matching argument (UB) */
+            return;
+        }
+        break;
+    case CAMERA_STATE_SNAPSHOT:
+    default:
+        break;
+    }
+
+    mm_app_close(my_cam_app.cam_open);
+    mm_app_open(cam_id);
+}
+
+int mm_app_set_dim(int8_t cam_id, cam_ctrl_dimension_t *dim) /* cache *dim and push it to the HAL via MM_CAMERA_PARM_DIMENSION; returns the HAL rc */
+{
+    int rc = MM_CAMERA_OK;
+    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id); /* NOTE(review): not NULL-checked; assumes the camera is open */
+
+    CDBG("%s:BEGIN\n", __func__);
+
+    memcpy(&pme->dim, dim, sizeof(cam_ctrl_dimension_t)); /* keep a local copy of the requested dimensions */
+    if(MM_CAMERA_OK != (rc = pme->cam->ops->set_parm(
+        pme->cam->camera_handle,MM_CAMERA_PARM_DIMENSION, &pme->dim)))
+    {
+        CDBG_ERROR("%s: set dimension err=%d\n", __func__, rc);
+    }
+    CDBG("%s:END, rc=%d\n", __func__, rc);
+    return rc;
+}
+
+int mm_app_get_dim(int8_t cam_id, cam_ctrl_dimension_t *dim) /* FIXME: whole body is '#if 0'-ed -- always returns MM_CAMERA_OK and never writes *dim */
+{
+    int rc = MM_CAMERA_OK;
+#if 0
+    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+    CDBG("%s:BEGIN\n", __func__);
+    if(pme->open_flag != true) {
+        CDBG("%s: dev not open yet\n", __func__);
+        rc = -MM_CAMERA_E_INVALID_OPERATION;
+        goto end;
+    }
+    /* now we only use the upper portion. TBD: needs to be fixed later */
+    //memcpy(&pme->dim, dim, sizeof(cam_ctrl_dimension_t));
+    if(MM_CAMERA_OK != (rc = pme->cam->cfg->get_parm(pme->cam,
+        MM_CAMERA_PARM_DIMENSION, &pme->dim))) {
+        CDBG("%s: set dimension err=%d\n", __func__, rc);
+    }
+    CDBG("%s: raw_w=%d,raw_h=%d\n",
+         __func__, pme->dim.orig_picture_width, pme->dim.orig_picture_height);
+    if(dim)
+        memcpy(dim, &pme->dim, sizeof(cam_ctrl_dimension_t));
+
+end:
+    CDBG("%s:END, rc=%d\n", __func__, rc);
+#endif
+    return rc;
+}
+
+void mm_app_close_ch(int cam_id, int ch_type) /* release the camera's acquired channel; ch_type is only logged -- pme->ch_id is what gets released */
+{
+    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id); /* NOTE(review): not NULL-checked */
+
+    pme->cam->ops->ch_release(pme->cam->camera_handle,pme->ch_id);
+    CDBG("%s:END,cam_id = %d, ch = %d\n", __func__, cam_id, ch_type);
+
+}
+
+int mm_app_unit_test() /* run the sanity unit-test suite; always returns 0 (results come from the entry function's own reporting) */
+{
+    my_cam_app.run_sanity = 1;
+    mm_app_unit_test_entry(&my_cam_app);
+    return 0;
+}
+
+int mm_app_dual_test() /* run the dual-camera test suite; always returns 0 */
+{
+    my_cam_app.run_sanity = 1;
+    mm_app_dual_test_entry(&my_cam_app);
+    return 0;
+}
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_display.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_display.c
new file mode 100644
index 0000000..3640f96
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_display.c
@@ -0,0 +1,574 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdbool.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <pthread.h>
+#include <sys/ioctl.h>
+struct file;
+struct inode;
+#include <linux/android_pmem.h>
+#include <sys/mman.h>
+#include <errno.h>
+#include <sys/time.h>
+#include <string.h>
+
+#include <inttypes.h>
+#include <linux/msm_mdp.h>
+#include <linux/fb.h>
+#include <linux/videodev2.h>
+#include "mm_camera_dbg.h"
+#include "QCamera_Intf.h"
+
+#ifdef DRAW_RECTANGLES
+extern roi_info_t camframe_roi;
+
+#undef CAM_FRM_DRAW_RECT
+#define CAM_FRM_DRAW_RECT
+#endif
+
+#ifdef CAM_FRM_DRAW_FD_RECT
+#undef CAM_FRM_DRAW_RECT
+#define CAM_FRM_DRAW_RECT
+#endif
+
+struct fb_var_screeninfo vinfo;
+struct fb_fix_screeninfo finfo;
+int fb_fd = 0;
+union {
+ char dummy[sizeof(struct mdp_blit_req_list) +
+ sizeof(struct mdp_blit_req) * 1];
+ struct mdp_blit_req_list list;
+} yuv;
+
+static pthread_t cam_frame_fb_thread_id;
+static int camframe_fb_exit;
+
+static int is_camframe_fb_thread_ready;
+USER_INPUT_DISPLAY_T input_display;
+
+unsigned use_overlay = 0;
+struct msmfb_overlay_data ov_front, ov_back, *ovp_front, *ovp_back;
+struct mdp_overlay overlay, *overlayp;
+int vid_buf_front_id, vid_buf_back_id;
+static unsigned char please_initialize = 1;
+int num_of_ready_frames = 0;
+
+static pthread_cond_t sub_thread_ready_cond = PTHREAD_COND_INITIALIZER;
+static pthread_mutex_t sub_thread_ready_mutex = PTHREAD_MUTEX_INITIALIZER;
+pthread_cond_t camframe_fb_cond = PTHREAD_COND_INITIALIZER;
+pthread_mutex_t camframe_fb_mutex = PTHREAD_MUTEX_INITIALIZER;
+static void notify_camframe_fb_thread();
+
+/* Switch the display path to the MDP overlay driver (MSMFB_OVERLAY_*)
+ * instead of the default MSMFB_BLIT + FBIOPAN_DISPLAY path. */
+void use_overlay_fb_display_driver(void)
+{
+ use_overlay = 1;
+}
+
+/* Program the MDP overlay from a blit request.
+ * On the first call (please_initialize set) this also issues
+ * MSMFB_OVERLAY_SET and records the overlay id into ov_front/ov_back;
+ * later calls only refresh the cached src/dst rectangles.
+ * NOTE(review): the dst clamp below hard-codes a 480x800 panel. */
+void overlay_set_params(struct mdp_blit_req *e)
+{
+ int result;
+
+ if (please_initialize) {
+ overlayp = &overlay;
+ ovp_front = &ov_front;
+ ovp_back = &ov_back;
+
+ overlayp->id = MSMFB_NEW_REQUEST;
+ }
+
+ overlayp->src.width = e->src.width;
+ overlayp->src.height = e->src.height;
+ overlayp->src.format = e->src.format;
+
+ overlayp->src_rect.x = e->src_rect.x;
+ overlayp->src_rect.y = e->src_rect.y;
+ overlayp->src_rect.w = e->src_rect.w;
+ overlayp->src_rect.h = e->src_rect.h;
+
+ overlayp->dst_rect.x = e->dst_rect.x;
+ overlayp->dst_rect.y = e->dst_rect.y;
+ /* ROTATOR is enabled in overlay library, swap dimensions
+ here to take care of that */
+ overlayp->dst_rect.w = e->dst_rect.h;
+ overlayp->dst_rect.h = e->dst_rect.w;
+
+ /* Clamp to panel size (hard-coded 480x800). */
+ if (overlayp->dst_rect.w > 480)
+ overlayp->dst_rect.w = 480;
+ if (overlayp->dst_rect.h > 800)
+ overlayp->dst_rect.h = 800;
+
+ overlayp->z_order = 0; // FB_OVERLAY_VID_0;
+ overlayp->alpha = e->alpha;
+ overlayp->transp_mask = 0; /* 0xF81F */
+ overlayp->flags = e->flags;
+ overlayp->is_fg = 1;
+
+ if (please_initialize) {
+ CDBG("src.width %d height %d; src_rect.x %d y %d w %d h %d; dst_rect.x %d y %d w %d h %d\n",
+ overlayp->src.width, overlayp->src.height,
+ overlayp->src_rect.x, overlayp->src_rect.y, overlayp->src_rect.w, overlayp->src_rect.h,
+ overlayp->dst_rect.x, overlayp->dst_rect.y, overlayp->dst_rect.w, overlayp->dst_rect.h
+ );
+
+ /* NOTE(review): failure is only logged; we still fall through and
+ * clear please_initialize, so a failed SET is never retried. */
+ result = ioctl(fb_fd, MSMFB_OVERLAY_SET, overlayp);
+ if (result < 0) {
+ CDBG("ERROR: MSMFB_OVERLAY_SET failed!, result =%d\n", result);
+ }
+ }
+
+ if (please_initialize) {
+ vid_buf_front_id = overlayp->id; /* keep return id */
+
+ ov_front.id = overlayp->id;
+ ov_back.id = overlayp->id;
+ please_initialize = 0;
+ }
+
+ return;
+}
+
+/* Point the front overlay buffer at the given frame's memory fd.
+ * The actual display happens later in camframe_fb_thread via
+ * MSMFB_OVERLAY_PLAY. */
+void overlay_set_frame(struct msm_frame *frame)
+{
+ ov_front.data.offset = 0;
+ ov_front.data.memory_id = frame->fd;
+ return;
+}
+
+/*===========================================================================
+ * FUNCTION test_app_camframe_callback
+ * DESCRIPTION display frame
+ *==========================================================================*/
+/* Per-frame display callback: builds an MDP blit request for the frame,
+ * applies MDP crop for preview-path frames, then either hands the frame
+ * to the overlay (use_overlay) or blits it directly, and finally wakes
+ * the fb thread to pan/play the result. */
+void test_app_camframe_callback(struct msm_frame *frame)
+{
+ int result = 0;
+ struct mdp_blit_req *e;
+ struct timeval td1, td2;
+ struct timezone tz;
+
+ common_crop_t *crop = (common_crop_t *) (frame->cropinfo);
+
+ /* Initialize yuv structure */
+ yuv.list.count = 1;
+
+ e = &yuv.list.req[0];
+
+ e->src.width = input_display.user_input_display_width;
+ e->src.height = input_display.user_input_display_height;
+ e->src.format = MDP_Y_CRCB_H2V2;
+ e->src.offset = 0;
+ e->src.memory_id = frame->fd;
+
+ e->dst.width = vinfo.xres;
+ e->dst.height = vinfo.yres;
+ e->dst.format = MDP_RGB_565;
+ e->dst.offset = 0;
+ e->dst.memory_id = fb_fd;
+
+ e->transp_mask = 0xffffffff;
+ e->flags = 0;
+ e->alpha = 0xff;
+
+ /* Starting doing MDP Cropping */
+ if (frame->path == OUTPUT_TYPE_P) {
+
+ if (crop->in2_w != 0 || crop->in2_h != 0) {
+
+ /* Center the crop window inside the output dimensions. */
+ e->src_rect.x = (crop->out2_w - crop->in2_w + 1) / 2 - 1;
+
+ e->src_rect.y = (crop->out2_h - crop->in2_h + 1) / 2 - 1;
+
+ e->src_rect.w = crop->in2_w;
+ e->src_rect.h = crop->in2_h;
+
+ CDBG("e->src_rect.x = %d\n", e->src_rect.x);
+ CDBG("e->src_rect.y = %d\n", e->src_rect.y);
+ CDBG("e->src_rect.w = %d\n", e->src_rect.w);
+ CDBG("e->src_rect.h = %d\n", e->src_rect.h);
+
+ e->dst_rect.x = 0;
+ e->dst_rect.y = 0;
+ e->dst_rect.w = input_display.user_input_display_width;
+ e->dst_rect.h = input_display.user_input_display_height;
+ } else {
+ e->src_rect.x = 0;
+ e->src_rect.y = 0;
+ e->src_rect.w = input_display.user_input_display_width;
+ e->src_rect.h = input_display.user_input_display_height;
+
+ e->dst_rect.x = 0;
+ e->dst_rect.y = 0;
+ e->dst_rect.w = input_display.user_input_display_width;
+ e->dst_rect.h = input_display.user_input_display_height;
+ }
+ if (use_overlay) overlay_set_params(e);
+ } else {
+ /* NOTE(review): non-preview paths are deliberately ignored here;
+ * the frame is still blitted below with the default (zeroed)
+ * src_rect/dst_rect — confirm this is intended. */
+
+ }
+
+ gettimeofday(&td1, &tz);
+
+ if (use_overlay) overlay_set_frame(frame);
+ else {
+ result = ioctl(fb_fd, MSMFB_BLIT, &yuv.list);
+ if (result < 0) {
+ CDBG("MSM_FBIOBLT failed! line=%d\n", __LINE__);
+ }
+ }
+
+ gettimeofday(&td2, &tz);
+ CDBG("Profiling: MSMFB_BLIT takes %ld microseconds\n",
+ ((td2.tv_sec - td1.tv_sec) * 1000000 + (td2.tv_usec - td1.tv_usec)));
+
+ td1 = td2;
+ notify_camframe_fb_thread();
+ /* add frame back to the free queue*/
+ //camframe_add_frame(CAM_PREVIEW_FRAME, frame);
+}
+
+/* Producer side of the frame handshake: bump the ready-frame count and
+ * wake camframe_fb_thread under camframe_fb_mutex. */
+void notify_camframe_fb_thread()
+{
+ pthread_mutex_lock(&camframe_fb_mutex);
+
+ num_of_ready_frames ++;
+ pthread_cond_signal(&camframe_fb_cond);
+
+ pthread_mutex_unlock(&camframe_fb_mutex);
+}
+
+void camframe_fb_thread_ready_signal(void);
+
+/* Framebuffer consumer thread. Opens the fb device, reads screen info,
+ * signals readiness to the launcher, then loops: wait for ready frames
+ * and display each via MSMFB_OVERLAY_PLAY (overlay path) or
+ * FBIOPAN_DISPLAY (blit path) until camframe_fb_exit is set.
+ * On setup failure it sets camframe_fb_exit = -1 before signalling
+ * readiness, which launch_camframe_fb_thread returns as its error code. */
+void *camframe_fb_thread(void *data)
+{
+ int result = 0;
+ static struct timeval td1, td2;
+ struct timezone tz;
+
+#ifdef _ANDROID_
+ fb_fd = open(ANDROID_FB0, O_RDWR);
+ CDBG("%s:android dl '%s', fd=%d\n", __func__, ANDROID_FB0, fb_fd);
+#else
+ fb_fd = open(LE_FB0, O_RDWR);
+ CDBG("%s:LE_FB0 dl, '%s', fd=%d\n", __func__, LE_FB0, fb_fd);
+#endif
+ if (fb_fd < 0) {
+ CDBG_ERROR("cannot open framebuffer %s or %s file node\n",
+ ANDROID_FB0, LE_FB0);
+ goto fail1;
+ }
+
+ if (ioctl(fb_fd, FBIOGET_VSCREENINFO, &vinfo) < 0) {
+ CDBG_ERROR("cannot retrieve vscreenInfo!\n");
+ goto fail;
+ }
+
+ if (ioctl(fb_fd, FBIOGET_FSCREENINFO, &finfo) < 0) {
+ CDBG_ERROR("can't retrieve fscreenInfo!\n");
+ goto fail;
+ }
+
+ vinfo.activate = FB_ACTIVATE_VBL;
+
+ /* Unblock launch_camframe_fb_thread(), which waits for this signal. */
+ camframe_fb_thread_ready_signal();
+
+ pthread_mutex_lock(&camframe_fb_mutex);
+ while (!camframe_fb_exit) {
+ CDBG("cam_frame_fb_thread: num_of_ready_frames: %d\n", num_of_ready_frames);
+ if (num_of_ready_frames <= 0) {
+ pthread_cond_wait(&camframe_fb_cond, &camframe_fb_mutex);
+ }
+ /* Re-check the predicate: the wakeup may be the exit signal. */
+ if (num_of_ready_frames > 0) {
+ num_of_ready_frames --;
+
+ gettimeofday(&td1, &tz);
+ if (use_overlay) {
+ result = ioctl(fb_fd, MSMFB_OVERLAY_PLAY, ovp_front);
+ } else {
+ result = ioctl(fb_fd, FBIOPAN_DISPLAY, &vinfo);
+ }
+
+ gettimeofday(&td2, &tz);
+
+ CDBG("Profiling: frame timestamp after FBIO display = %ld ms\n",
+ (td2.tv_sec*1000) + (td2.tv_usec/1000));
+
+ CDBG("cam_frame_fb_thread: elapse time for FBIOPAN_DISPLAY = %ld, return = %d\n",
+ (td2.tv_sec - td1.tv_sec) * 1000000 + td2.tv_usec - td1.tv_usec, result);
+
+ if (result < 0) {
+ CDBG("DISPLAY: Failed\n");
+ }
+ }
+ }
+
+ pthread_mutex_unlock(&camframe_fb_mutex);
+
+ if (use_overlay) {
+ if (ioctl(fb_fd, MSMFB_OVERLAY_UNSET, &vid_buf_front_id)) {
+ CDBG("\nERROR! MSMFB_OVERLAY_UNSET failed! (Line %d)\n", __LINE__);
+ goto fail;
+ }
+ }
+
+ return NULL;
+
+ fail:
+ close(fb_fd);
+ fail1:
+ camframe_fb_exit = -1;
+ camframe_fb_thread_ready_signal();
+ return NULL;
+}
+
+/* Start camframe_fb_thread and block until it reports ready (or failed).
+ * Returns camframe_fb_exit: 0 on success, -1 if the thread's fb setup
+ * failed (set on its fail path before it signals readiness). */
+int launch_camframe_fb_thread(void)
+{
+
+ camframe_fb_exit = 0;
+ is_camframe_fb_thread_ready = 0;
+ pthread_create(&cam_frame_fb_thread_id, NULL, camframe_fb_thread, NULL);
+
+ /* Waiting for launching sub thread ready signal. */
+ CDBG("launch_camframe_fb_thread(), call pthread_cond_wait\n");
+
+ pthread_mutex_lock(&sub_thread_ready_mutex);
+ if (!is_camframe_fb_thread_ready) {
+ pthread_cond_wait(&sub_thread_ready_cond, &sub_thread_ready_mutex);
+ }
+ pthread_mutex_unlock(&sub_thread_ready_mutex);
+
+ CDBG("launch_camframe_fb_thread(), call pthread_cond_wait done\n");
+ CDBG("%s:fb rc=%d\n", __func__, camframe_fb_exit);
+ return camframe_fb_exit;
+}
+
+/* Stop and join camframe_fb_thread: raise the exit flag, re-arm the
+ * one-time overlay initialization for a future session, wake the
+ * thread, join it, and close the fb device fd. */
+void release_camframe_fb_thread(void)
+{
+ camframe_fb_exit = 1;
+ please_initialize = 1;
+
+ /* Notify the camframe fb thread to wake up */
+ if (cam_frame_fb_thread_id != 0) {
+ pthread_mutex_lock(&camframe_fb_mutex);
+ pthread_cond_signal(&camframe_fb_cond);
+ pthread_mutex_unlock(&camframe_fb_mutex);
+ if (pthread_join(cam_frame_fb_thread_id, NULL) != 0) {
+ CDBG("cam_frame_fb_thread exit failure!\n");
+ }
+ close(fb_fd);
+ }
+}
+
+/* Mark the fb thread as ready and wake the launcher blocked in
+ * launch_camframe_fb_thread(). Also called on the thread's failure
+ * path (after camframe_fb_exit is set to -1). */
+void camframe_fb_thread_ready_signal(void)
+{
+ /* Send the signal to control thread to indicate that the cam frame fb
+ * ready.
+ */
+ CDBG("cam_frame_fb_thread() is ready, call pthread_cond_signal\n");
+
+ pthread_mutex_lock(&sub_thread_ready_mutex);
+ is_camframe_fb_thread_ready = 1;
+ pthread_cond_signal(&sub_thread_ready_cond);
+ pthread_mutex_unlock(&sub_thread_ready_mutex);
+
+ CDBG("cam_frame_fb_thread() is ready, call pthread_cond_signal done\n");
+}
+
+#ifdef CAM_FRM_DRAW_RECT
+/* Draw a one-pixel-wide rectangle outline (value 0xff) into a byte
+ * buffer of row stride buf_w, with top-left (x,y) and size (dx,dy).
+ * NOTE(review): no bounds checking — writes touch row y+dy and column
+ * x+dx (the loops exclude them as start/end indices but both are used
+ * directly as buf indices), so the caller must guarantee the rectangle
+ * plus one pixel fits inside the buffer. */
+void draw_rect(char *buf, int buf_w,
+ int x, int y, int dx, int dy)
+{
+ int i;
+ int left = x;
+ int right = x+dx;
+ int top = y;
+ int bottom = y+dy;
+
+ /* Horizontal edges. */
+ for (i = left; i < right; i++) {
+ buf[top*buf_w+i] = 0xff;
+ buf[bottom*buf_w+i] = 0xff;
+ }
+ /* Vertical edges. */
+ for (i = top; i < bottom; i++) {
+ buf[i*buf_w+left] = 0xff;
+ buf[i*buf_w+right] = 0xff;
+ }
+}
+#endif
+
+/* Overlay ROI / face-detect rectangles onto a frame's luma plane.
+ * Compiled behavior depends on DRAW_RECTANGLES (global camframe_roi)
+ * and CAM_FRM_DRAW_FD_RECT (per-frame roi_info); with neither defined
+ * this is a no-op.
+ * NOTE(review): the buffer strides 640 and 800 are hard-coded and must
+ * match the actual frame width — confirm against the preview config. */
+void draw_rectangles(struct msm_frame* newFrame)
+{
+ struct fd_roi_t *p_fd_roi;
+#ifdef DRAW_RECTANGLES
+ uint8_t i;
+ for (i = 0; i < camframe_roi.num_roi; i++) {
+ CDBG("%s: camframe_roi: i=%d, x=%d, y=%d, dx=%d, dy=%d\n", __func__,
+ i, camframe_roi.roi[i].x, camframe_roi.roi[i].y,
+ camframe_roi.roi[i].dx, camframe_roi.roi[i].dy);
+ draw_rect((char*)newFrame->buffer, 640,
+ camframe_roi.roi[i].x, camframe_roi.roi[i].y,
+ camframe_roi.roi[i].dx, camframe_roi.roi[i].dy);
+ }
+#endif
+
+#ifdef CAM_FRM_DRAW_FD_RECT
+ p_fd_roi = (struct fd_roi_t *)newFrame->roi_info.info;
+ if(p_fd_roi && p_fd_roi->rect_num > 0){
+ int i;
+ for(i =0; i < p_fd_roi->rect_num; i++)
+ {
+ draw_rect((char*)newFrame->buffer, 800,
+ p_fd_roi->faces[i].x, p_fd_roi->faces[i].y,
+ p_fd_roi->faces[i].dx, p_fd_roi->faces[i].dy);
+ }
+ }
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION - v4l2_render -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+/*===========================================================================
+ * FUNCTION - v4l2_render -
+ *
+ * DESCRIPTION: Build a blit request for a V4L2 buffer (CbCr format),
+ * apply the optional v4l2_crop as the source rectangle, program the
+ * overlay, and wake the fb thread to display it. The vb argument is
+ * currently unused. Always returns true.
+ *==========================================================================*/
+int v4l2_render(int frame_fd, struct v4l2_buffer *vb, struct v4l2_crop *crop)
+{
+ struct mdp_blit_req *e;
+ /* Initialize yuv structure */
+ yuv.list.count = 1;
+ e = &yuv.list.req[0];
+
+ e->src.width = input_display.user_input_display_width;
+ e->src.height = input_display.user_input_display_height;
+ e->src.format = MDP_Y_CBCR_H2V2;
+ e->src.offset = 0;
+ e->src.memory_id = frame_fd;
+
+ e->dst.width = vinfo.xres;
+ e->dst.height = vinfo.yres;
+ e->dst.format = MDP_RGB_565;
+ e->dst.offset = 0;
+ e->dst.memory_id = fb_fd;
+
+ e->transp_mask = 0xffffffff;
+ e->flags = 0;
+ e->alpha = 0xff;
+
+ if (crop != NULL && (crop->c.width != 0 || crop->c.height != 0)) {
+ /* Use the caller-supplied crop window as the source rect. */
+ e->src_rect.x = crop->c.left;
+ e->src_rect.y = crop->c.top;
+ e->src_rect.w = crop->c.width;
+ e->src_rect.h = crop->c.height;
+
+ e->dst_rect.x = 0;
+ e->dst_rect.y = 0;
+ e->dst_rect.w = input_display.user_input_display_width;
+ e->dst_rect.h = input_display.user_input_display_height;
+ } else {
+ e->dst_rect.x = 0;
+ e->dst_rect.y = 0;
+ e->dst_rect.w = input_display.user_input_display_width;
+ e->dst_rect.h = input_display.user_input_display_height;
+
+ e->src_rect.x = 0;
+ e->src_rect.y = 0;
+ e->src_rect.w = input_display.user_input_display_width;
+ e->src_rect.h = input_display.user_input_display_height;
+ }
+ overlay_set_params(e);
+ ov_front.data.offset = 0;
+ ov_front.data.memory_id = frame_fd;
+ notify_camframe_fb_thread();
+
+ return true;
+}
+
+/* Render a frame fd (CrCb format) via the overlay, centering the crop
+ * window from cropinfo when one is present; otherwise display the full
+ * user-configured rectangle. Always returns true.
+ * NOTE(review): `crop` is only assigned when cropinfo != NULL; the
+ * short-circuit in the condition below keeps the NULL case safe.
+ * `croplen` is unused. */
+int mm_app_dl_render(int frame_fd, struct crop_info * cropinfo)
+{
+ struct mdp_blit_req *e;
+ int croplen = 0;
+ //struct crop_info *cropinfo;
+ common_crop_t *crop;
+
+ //cropinfo = (struct crop_info *)vb->input;
+ if(cropinfo != NULL) {
+ crop = (common_crop_t *)cropinfo->info;
+ }
+ /* Initialize yuv structure */
+ yuv.list.count = 1;
+ e = &yuv.list.req[0];
+
+ e->src.width = input_display.user_input_display_width;
+ e->src.height = input_display.user_input_display_height;
+ e->src.format = MDP_Y_CRCB_H2V2;
+ e->src.offset = 0;
+ e->src.memory_id = frame_fd;
+
+ e->dst.width = vinfo.xres;
+ e->dst.height = vinfo.yres;
+ e->dst.format = MDP_RGB_565;
+ e->dst.offset = 0;
+ e->dst.memory_id = fb_fd;
+
+ e->transp_mask = 0xffffffff;
+ e->flags = 0;
+ e->alpha = 0xff;
+
+ if (cropinfo != NULL && (crop->in2_w != 0 || crop->in2_h != 0)) {
+ /* Center the crop window inside the output dimensions. */
+ e->src_rect.x = (crop->out2_w - crop->in2_w + 1) / 2 - 1;
+ e->src_rect.y = (crop->out2_h - crop->in2_h + 1) / 2 - 1;
+ e->src_rect.w = crop->in2_w;
+ e->src_rect.h = crop->in2_h;
+
+ e->dst_rect.x = 0;
+ e->dst_rect.y = 0;
+ e->dst_rect.w = input_display.user_input_display_width;
+ e->dst_rect.h = input_display.user_input_display_height;
+ } else {
+ e->dst_rect.x = 0;
+ e->dst_rect.y = 0;
+ e->dst_rect.w = input_display.user_input_display_width;
+ e->dst_rect.h = input_display.user_input_display_height;
+
+ e->src_rect.x = 0;
+ e->src_rect.y = 0;
+ e->src_rect.w = input_display.user_input_display_width;
+ e->src_rect.h = input_display.user_input_display_height;
+ }
+
+ overlay_set_params(e);
+
+ ov_front.data.offset = 0;
+ ov_front.data.memory_id = frame_fd;
+ notify_camframe_fb_thread();
+
+ return true;
+}
+
+
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_dual_test.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_dual_test.c
new file mode 100644
index 0000000..88a1319
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_dual_test.c
@@ -0,0 +1,565 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_qcamera_unit_test.h"
+
+#define MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP 4
+#define MM_QCAM_APP_TEST_NUM 128
+
+#define MM_QCAMERA_APP_INTERATION 5
+#define MM_QCAMERA_APP_WAIT_TIME 1000000000
+
+static mm_app_tc_t mm_app_tc[MM_QCAM_APP_TEST_NUM];
+static int num_test_cases = 0;
+
+/* Dual test 0: preview on the back camera concurrently with RDI on the
+ * front camera. Returns MM_CAMERA_OK (0) on success, -1 or the failing
+ * call's rc otherwise.
+ * NOTE(review): the error paths `goto end` without stopping/closing
+ * whatever was already started, and the final stop/close use
+ * my_cam_app.cam_open rather than the explicit camera ids opened
+ * above — confirm only one camera is left open at that point. */
+int mm_app_dtc_0(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+ int front_camera = 1;
+ int back_camera = 0;
+
+ printf("\n Verifying Preview on back Camera and RDI on Front camera...\n");
+
+ if(mm_app_open(back_camera) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() back camera err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(back_camera) != MM_CAMERA_OK){
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startPreview(back_camera))) {
+ CDBG_ERROR("%s: back camera startPreview() err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ mm_camera_app_wait();
+ if(mm_app_open(front_camera) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() front camera err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(front_camera) != MM_CAMERA_OK){
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startRdi(front_camera))) {
+ CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+ goto end;
+ }
+ mm_camera_app_wait();
+
+ if( MM_CAMERA_OK != (rc = stopRdi(front_camera))) {
+ CDBG_ERROR("%s: startPreview() backcamera err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ CDBG("%s: startPreview() err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ CDBG("%s:END, rc = %d\n", __func__, rc);
+ return rc;
+}
+
+/* Dual test 1: for every camera, start preview and take
+ * MM_QCAMERA_APP_INTERATION YUV snapshots, verifying each completes.
+ * Returns 0 on success, -1 or the failing call's rc otherwise. */
+int mm_app_dtc_1(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying Snapshot on front and back camera...\n");
+ for(i = 0; i < cam_apps->num_cameras; i++) {
+ if( mm_app_open(i) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ /* result counts successful snapshot iterations for this camera. */
+ for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if( MM_CAMERA_OK != (rc = takePicture_yuv(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: TakePicture() err=%d\n", __func__, rc);
+ break;
+ }
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ CDBG_ERROR("%s: Snapshot/Preview Callback not received in time or qbuf Faile\n", __func__);
+ break;
+ }*/
+ mm_camera_app_wait();
+ result++;
+ }
+ if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ CDBG("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if(result != MM_QCAMERA_APP_INTERATION) {
+ printf("%s: Snapshot Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+end:
+ if(rc == 0) {
+ printf("\t***Passed***\n");
+ }else{
+ printf("\t***Failed***\n");
+ }
+ CDBG("%s:END, rc = %d\n", __func__, rc);
+ return rc;
+}
+
+/* Dual test 2: for every camera, start preview and run
+ * MM_QCAMERA_APP_INTERATION record/stop-record cycles.
+ * Returns 0 on success, -1 or the failing call's rc otherwise. */
+int mm_app_dtc_2(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying Video on front and back camera...\n");
+ for(i = 0; i < cam_apps->num_cameras; i++) {
+ if( mm_app_open(i) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ /* result counts successful record/stop cycles for this camera. */
+ for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if( MM_CAMERA_OK != (rc = startRecording(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: StartVideorecording() err=%d\n", __func__, rc);
+ break;
+ }
+
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ CDBG_ERROR("%s: Video Callback not received in time\n", __func__);
+ break;
+ }*/
+ mm_camera_app_wait();
+ if( MM_CAMERA_OK != (rc = stopRecording(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: Stopvideorecording() err=%d\n", __func__, rc);
+ break;
+ }
+ result++;
+ }
+ if( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ CDBG("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if(result != MM_QCAMERA_APP_INTERATION) {
+ printf("%s: Video Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ CDBG("%s:END, rc = %d\n", __func__, rc);
+ return rc;
+}
+
+/* Dual test 3: for every camera, run MM_QCAMERA_APP_INTERATION
+ * RDI start/stop cycles (no preview). Fails fast if camera query
+ * reported zero cameras. Returns 0 on success, -1 / rc otherwise. */
+int mm_app_dtc_3(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying RDI Stream on front and back camera...\n");
+ if(cam_apps->num_cameras == 0) {
+ CDBG_ERROR("%s:Query Failed: Num of cameras = %d\n",__func__, cam_apps->num_cameras);
+ rc = -1;
+ goto end;
+ }
+ for(i = 0; i < cam_apps->num_cameras; i++) {
+ if( mm_app_open(i) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if(system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK){
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ /* result counts successful RDI start/stop cycles for this camera. */
+ for(j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if( MM_CAMERA_OK != (rc = startRdi(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: StartVideorecording() err=%d\n", __func__, rc);
+ break;
+ }
+
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ CDBG_ERROR("%s: Video Callback not received in time\n", __func__);
+ break;
+ }*/
+ mm_camera_app_wait();
+ if( MM_CAMERA_OK != (rc = stopRdi(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: Stopvideorecording() err=%d\n", __func__, rc);
+ break;
+ }
+ result++;
+ }
+ if( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if(result != MM_QCAMERA_APP_INTERATION) {
+ printf("%s: Video Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+end:
+ if(rc == 0) {
+ printf("\nPassed\n");
+ }else{
+ printf("\nFailed\n");
+ }
+ CDBG("%s:END, rc = %d\n", __func__, rc);
+ return rc;
+}
+
+/* Dual test 4: open/close preview channel loop.
+ * NOTE(review): body is compiled out with #if 0 (it references a
+ * cam_id variable and old APIs that no longer exist here); currently
+ * prints its banner and returns MM_CAMERA_OK. */
+int mm_app_dtc_4(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - open/close ,video0, open/close preview channel only\n", __func__);
+#if 0
+ for(i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n", __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+ if(0 != (rc = mm_app_open_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_close_preview(cam_id))) {
+ goto end;
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n", __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+
+
+/* Dual test 5: open/close snapshot channel loop.
+ * NOTE(review): body compiled out with #if 0; currently prints its
+ * banner and returns MM_CAMERA_OK. */
+int mm_app_dtc_5(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - open/close ,video0, open/close snapshot channel only\n", __func__);
+#if 0
+ for(i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+ if(0 != (rc = mm_app_open_snapshot(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_close_snapshot(cam_id))) {
+ goto end;
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+/* Dual test 6: simple preview loop.
+ * NOTE(review): body compiled out with #if 0; currently prints its
+ * banner and returns MM_CAMERA_OK. */
+int mm_app_dtc_6(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - simple preview \n", __func__);
+#if 0
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+
+ for(i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if(0 != (rc = mm_app_init_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_start_preview(cam_id))) {
+ goto end;
+ }
+ /* sleep 8 seconds */
+ usleep(8000000);
+ if(0 != (rc = mm_app_stop_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc=mm_app_deinit_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_close_preview(cam_id))) {
+ goto end;
+ }
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+/* Dual test 7: preview plus recording loop.
+ * NOTE(review): body compiled out with #if 0; currently prints its
+ * banner and returns MM_CAMERA_OK. */
+int mm_app_dtc_7(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - simple preview and recording \n", __func__);
+#if 0
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+
+ for(i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if(0 != (rc = mm_app_init_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_start_preview(cam_id))) {
+ goto end;
+ }
+ /* sleep 8 seconds */
+ usleep(8000000);
+ if(0 != (rc = mm_app_start_recording(cam_id))) {
+ goto end;
+ }
+ usleep(1000000);
+ if(0 != (rc = mm_app_stop_recording(cam_id))) {
+ goto end;
+ }
+ usleep(8000000);
+ if(0 != (rc = mm_app_stop_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc=mm_app_deinit_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_close_preview(cam_id))) {
+ goto end;
+ }
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+/* Dual test 8: preview -> record -> snapshot -> preview cycle.
+ * NOTE(review): body compiled out with #if 0; currently prints its
+ * banner and returns MM_CAMERA_OK. The disabled code also calls a
+ * misspelled mm_app_deinit_snahspot(). */
+int mm_app_dtc_8(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - preview, recording, and snapshot, then preview again \n", __func__);
+#if 0
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+
+ for(i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if(0 != (rc = mm_app_init_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_start_preview(cam_id))) {
+ goto end;
+ }
+ /* sleep 8 seconds */
+ usleep(8000000);
+ if(0 != (rc = mm_app_start_recording(cam_id))) {
+ goto end;
+ }
+ usleep(1000000);
+ if(0 != (rc = mm_app_stop_recording(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_stop_preview(cam_id))) {
+ goto end;
+ }
+ if(0!=(rc=mm_app_init_snapshot(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc=mm_app_take_picture(cam_id))) {
+ goto end;
+ }
+ if( 0 != (rc = mm_app_deinit_snahspot(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_start_preview(cam_id))) {
+ goto end;
+ }
+ usleep(8000000);
+ if(0 != (rc=mm_app_deinit_preview(cam_id))) {
+ goto end;
+ }
+ if(0 != (rc = mm_app_close_preview(cam_id))) {
+ goto end;
+ }
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+
+/* Populate the mm_app_tc table with the enabled dual test cases and
+ * return how many were registered. Only mm_app_dtc_0 is currently
+ * enabled; dtc_1..dtc_8 are commented out. */
+int mm_app_gen_dual_test_cases()
+{
+ int tc = 0;
+ memset(mm_app_tc, 0, sizeof(mm_app_tc));
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_0;
+ /*if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_1;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_2;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_3;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_4;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_5;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_6;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_7;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_dtc_8;*/
+ return tc;
+}
+
+/* Run all registered dual test cases in order, aborting on the first
+ * failure; returns MM_CAMERA_OK or the first failing case's rc.
+ * NOTE(review): cam_id is unused, and the summary printf string
+ * "nTOTAL_TSET_CASE" looks like a typo for "\nTOTAL_TEST_CASE". */
+int mm_app_dual_test_entry(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, tc = 0;
+ int cam_id = 0;
+
+ tc = mm_app_gen_dual_test_cases();
+ CDBG("Running %d test cases\n",tc);
+ for(i = 0; i < tc; i++) {
+ mm_app_tc[i].r = mm_app_tc[i].f(cam_app);
+ if(mm_app_tc[i].r != MM_CAMERA_OK) {
+ printf("%s: test case %d error = %d, abort unit testing engine!!!!\n",
+ __func__, i, mm_app_tc[i].r);
+ rc = mm_app_tc[i].r;
+ goto end;
+ }
+ }
+end:
+ printf("nTOTAL_TSET_CASE = %d, NUM_TEST_RAN = %d, rc=%d\n", tc, i, rc);
+ return rc;
+}
+
+
+
+
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_main_menu.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_main_menu.c
new file mode 100644
index 0000000..da70d34
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_main_menu.c
@@ -0,0 +1,1929 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <stdbool.h>
+#include <string.h>
+#include <unistd.h>
+#include <termios.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <signal.h>
+#include <errno.h>
+#include <sys/mman.h>
+#include <sys/time.h>
+#include <sys/ioctl.h>
+#include <linux/fb.h>
+#ifdef _ANDROID_
+#include <cutils/log.h>
+#endif
+#include <dlfcn.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_qcamera_main_menu.h"
+#include "mm_qcamera_display_dimensions.h"
+#include "mm_qcamera_app.h"
+
+/* Sentinel value used to mark the camera as opened. */
+#define CAMERA_OPENED 0
+
+/* Buffer sizes: width * height * 3/2 bytes -- one full-resolution luma
+ * plane plus half-size chroma (presumably YUV 4:2:0 -- confirm against
+ * the stream format actually configured). */
+#define VIDEO_BUFFER_SIZE (PREVIEW_WIDTH * PREVIEW_HEIGHT * 3/2)
+#define THUMBNAIL_BUFFER_SIZE (THUMBNAIL_WIDTH * THUMBNAIL_HEIGHT * 3/2)
+#define SNAPSHOT_BUFFER_SIZE (PICTURE_WIDTH * PICTURE_HEIGHT * 3/2)
+/*===========================================================================
+ * Macro
+ *===========================================================================*/
+/* Per-stream buffer counts (usage is not visible in this file chunk). */
+#define PREVIEW_FRAMES_NUM 4
+#define VIDEO_FRAMES_NUM 4
+#define THUMBNAIL_FRAMES_NUM 1
+#define SNAPSHOT_FRAMES_NUM 1
+/* Capacity of the enumfmtdesc format-enumeration array below. */
+#define MAX_NUM_FORMAT 32
+/*===========================================================================
+ * Defines
+ *===========================================================================*/
+
+/* Top-level menu: pairs each menu event id with the text printed by
+ * print_menu_preview_video().  Entries inside "#if 0" are disabled. */
+const CAMERA_MAIN_MENU_TBL_T camera_main_menu_tbl[] = {
+ {STOP_CAMERA, "Stop preview/video and exit camera."},
+ {PREVIEW_VIDEO_RESOLUTION, "Preview/Video Resolution: SQCIF/QCIF/"
+ "QVGA/CIF/VGA/WVGA... Default WVGA."},
+ {TAKE_YUV_SNAPSHOT, "Take a snapshot"},
+ {TAKE_RAW_SNAPSHOT, "Take a raw snapshot"},
+ {TAKE_ZSL_SNAPSHOT, "Take a ZSL snapshot"},
+ {START_RECORDING, "Start RECORDING"},
+ {START_RDI, "Start RDI stream"},
+ {STOP_RDI, "Stop RDI stream"},
+ {SWITCH_CAMERA, "Switch Camera"},
+#if 0
+ {SET_WHITE_BALANCE, "Set white balance mode: Auto/Off/Daylight/Incandescent/Fluorescent. Default Auto."},
+ {SET_EXP_METERING, "Set exposure metering mode: FrameAverage/CenterWeighted/SpotMetering. Default CenterWeighted"},
+ {GET_CTRL_VALUE, "Get control value menu"},
+ {TOGGLE_AFR, "Toggle auto frame rate. Default fixed frame rate"},
+ {SET_ISO, "ISO changes."},
+ {BRIGHTNESS_GOTO_SUBMENU, "Brightness changes."},
+ {CONTRAST_GOTO_SUBMENU, "Contrast changes."},
+ {EV_GOTO_SUBMENU, "EV changes."},
+ {SATURATION_GOTO_SUBMENU, "Saturation changes."},
+ {SET_ZOOM, "Set Digital Zoom."},
+ {SET_SHARPNESS, "Set Sharpness."},
+#endif
+};
+
+/* Supported preview/video resolutions: id, width, height, short name used
+ * as a return value by set_preview_video_dimension_tbl(), and menu text. */
+const PREVIEW_DIMENSION_TBL_T preview_video_dimension_tbl[] = {
+ { SQCIF, SQCIF_WIDTH, SQCIF_HEIGHT, "SQCIF", "Preview/Video Resolution: SQCIF <128x96>"},
+ { QCIF, QCIF_WIDTH, QCIF_HEIGHT, "QCIF", "Preview/Video Resolution: QCIF <176x144>"},
+ { QVGA, QVGA_WIDTH, QVGA_HEIGHT, "QVGA", "Preview/Video Resolution: QVGA <320x240>"},
+ { CIF, CIF_WIDTH, CIF_HEIGHT, "CIF", "Preview/Video Resolution: CIF <352x288>"},
+ { VGA, VGA_WIDTH, VGA_HEIGHT, "VGA", "Preview/Video Resolution: VGA <640x480>"},
+ { WVGA, WVGA_WIDTH, WVGA_HEIGHT, "WVGA", "Preview/Video Resolution: WVGA <800x480>"},
+ { SVGA, SVGA_WIDTH, SVGA_HEIGHT, "SVGA", "Preview/Video Resolution: SVGA <800x600>"},
+ { XGA, XGA_WIDTH, XGA_HEIGHT, "XGA", "Preview/Video Resolution: XGA <1024x768>"},
+ { HD720, HD720_WIDTH, HD720_HEIGHT, "HD720", "Preview/Video Resolution: HD720 <1280x720>"},
+};
+
+/* Sub-menu option tables.  Each pairs an option id with the label printed
+ * by the corresponding camera_*_tbl() menu function. */
+const CAMERA_BRIGHTNESS_TBL_T brightness_change_tbl[] = {
+ {INC_BRIGHTNESS, "Increase Brightness by one step."},
+ {DEC_BRIGHTNESS, "Decrease Brightness by one step."},
+};
+
+const CAMERA_CONTRST_TBL_T contrast_change_tbl[] = {
+ {INC_CONTRAST, "Increase Contrast by one step."},
+ {DEC_CONTRAST, "Decrease Contrast by one step."},
+};
+
+const CAMERA_EV_TBL_T camera_EV_tbl[] = {
+ {INCREASE_EV, "Increase EV by one step."},
+ {DECREASE_EV, "Decrease EV by one step."},
+};
+
+/* NOTE(review): "Satuation" is a typo in these user-visible strings
+ * (should be "Saturation"); left as-is here since fixing a runtime
+ * string is a behavior change. */
+const CAMERA_SATURATION_TBL_T camera_saturation_tbl[] = {
+ {INC_SATURATION, "Increase Satuation by one step."},
+ {DEC_SATURATION, "Decrease Satuation by one step."},
+};
+
+const CAMERA_SHARPNESS_TBL_T camera_sharpness_tbl[] = {
+ {INC_SHARPNESS, "Increase Sharpness."},
+ {DEC_SHARPNESS, "Decrease Sharpness."},
+};
+
+const WHITE_BALANCE_TBL_T white_balance_tbl[] = {
+ { MM_CAMERA_WHITE_BALANCE_AUTO, "White Balance - Auto"},
+ { MM_CAMERA_WHITE_BALANCE_OFF, "White Balance - Off"},
+ { MM_CAMERA_WHITE_BALANCE_DAYLIGHT, "White Balance - Daylight"},
+ { MM_CAMERA_WHITE_BALANCE_INCANDESCENT, "White Balance - Incandescent"},
+ { MM_CAMERA_WHITE_BALANCE_FLUORESCENT, "White Balance - Fluorescent"},
+};
+
+const CAMERA_TBL_T cam_tbl[] = {
+ { 1, "Back Camera"},
+ { 2, "Front Camera"},
+};
+
+const RECORD_TBL_T record_tbl[] = {
+ { LIVE_SNAPSHOT_MENU, "Take Live snapshot"},
+ { STOP_RECORDING_MENU, "Stop Recording"},
+};
+
+const GET_CTRL_TBL_T get_ctrl_tbl[] = {
+ { WHITE_BALANCE_STATE, "Get white balance state (auto/off)"},
+ { WHITE_BALANCE_TEMPERATURE, "Get white balance temperature"},
+ { BRIGHTNESS_CTRL, "Get brightness value"},
+ { EV, "Get exposure value"},
+ { CONTRAST_CTRL, "Get contrast value"},
+ { SATURATION_CTRL, "Get saturation value"},
+ { SHARPNESS_CTRL, "Get sharpness value"},
+};
+
+const EXP_METERING_TBL_T exp_metering_tbl[] = {
+ { EXP_METERING_FRAME_AVERAGE, "Exposure Metering - Frame Average"},
+ { EXP_METERING_CENTER_WEIGHTED, "Exposure Metering - Center Weighted"},
+ { EXP_METERING_SPOT_METERING, "Exposure Metering - Spot Metering"},
+};
+
+const ISO_TBL_T iso_tbl[] = {
+ { ISO_AUTO, "ISO: Auto"},
+ { ISO_DEBLUR, "ISO: Deblur"},
+ { ISO_100, "ISO: 100"},
+ { ISO_200, "ISO: 200"},
+ { ISO_400, "ISO: 400"},
+ { ISO_800, "ISO: 800"},
+ { ISO_1600, "ISO: 1600"},
+};
+
+const ZOOM_TBL_T zoom_tbl[] = {
+ { ZOOM_IN, "Zoom In one step"},
+ { ZOOM_OUT, "Zoom Out one step"},
+};
+
+
+/* Pixel formats enumerated from the driver (up to MAX_NUM_FORMAT) and the
+ * currently selected format. */
+struct v4l2_fmtdesc enumfmtdesc[MAX_NUM_FORMAT];
+struct v4l2_format current_fmt;
+
+/*===========================================================================
+ * Forward declarations
+ *===========================================================================*/
+/* Forward declarations for handlers defined later in this file. */
+static int set_fps(int fps);
+static int start_snapshot (void);
+static int stop_snapshot (void);
+/*===========================================================================
+ * Static global variables
+ *===========================================================================*/
+USER_INPUT_DISPLAY_T input_display;
+/* Set to -1 by test_app_camframe_timeout_callback() on frame timeout. */
+static int camframe_status = 0;
+
+/* Where captured images are written. */
+#ifdef _ANDROID_
+char *sdcard_path = "/data";
+#else
+char *sdcard_path = ".";
+#endif
+
+//void *libqcamera = NULL;
+//void (**LINK_jpegfragment_callback)(uint8_t * buff_ptr , uint32_t buff_size);
+//void (**LINK_jpeg_callback)(void);
+
+/* Cached UI state; the camera_* menu handlers below mutate these and the
+ * (currently disabled) V4L2 paths would push them to the driver. */
+int num_supported_fmts = 0;
+int memoryType = V4L2_MEMORY_MMAP; /* default */
+int preview_video_resolution_flag = 0;
+int effect = CAMERA_EFFECT_OFF;
+int brightness = CAMERA_DEF_BRIGHTNESS;
+int contrast = CAMERA_DEF_CONTRAST;
+int saturation = CAMERA_DEF_SATURATION;
+int sharpness = CAMERA_DEF_SHARPNESS;
+int32_t ev_num = 0;
+uint8_t ezTune = false;
+int pmemThumbnailfd = 0;
+int pmemSnapshotfd = 0;
+int pmemRawSnapshotfd = 0;
+int fdSnapshot = 0;
+int fdThumbnail = 0;
+char snapshotBuf[256] = { 0};
+char thumbnailBuf[256] = { 0};
+uint32_t snapshot_buff_size = 0;
+uint32_t raw_snapshot_buffer_size = 0;
+static int thumbnailCntr = 0, snapshotCntr = 0;
+unsigned char *thumbnail_buf = NULL, *main_img_buf = NULL, *raw_img_buf = NULL;
+int32_t *sharpness_AF = NULL;
+struct crop_info cropInfo;
+common_crop_t cropInfo_s;
+
+interface_ctrl_t intrfcCtrl;
+config3a_wb_t autoWB = CAMERA_WB_AUTO;
+isp3a_af_mode_t af_mode = AF_MODE_AUTO;
+cam_af_focusrect_t afFocusRect = AUTO;
+
+cam_af_ctrl_t af_ctrl;
+camera_iso_mode_type iso = CAMERA_ISO_AUTO;
+camera_antibanding_type antibanding = CAMERA_ANTIBANDING_OFF;
+camera_auto_exposure_mode_type aec_mode = CAMERA_AEC_CENTER_WEIGHTED;
+led_mode_t led_mode = LED_MODE_OFF;
+motion_iso_t motion_iso = MOTION_ISO_OFF;
+int32_t hue = CAMERA_DEF_HUE;
+fps_mode_t fps_mode = FPS_MODE_AUTO;
+
+struct v4l2_cropcap cropcap;
+struct v4l2_queryctrl zoom_queryctrl;
+struct v4l2_queryctrl sharpness_queryctrl;
+int zoom_level;
+
+Camera_Resolution Resolution;
+//int32_t g_camParmInfo_current_value = 0;
+//extern unsigned long preview_frames_buf;
+//extern void test_app_mmcamera_videoframe_callback(struct msm_frame *frame); // video_cam.c
+
+/* To flush free video buffers queue */
+//void (*LINK_cam_frame_flush_free_video)(void);
+/* NOTE(review): K&R-style declaration -- submain is declared with an
+ * unspecified parameter list; consider submain(void). */
+static int submain();
+
+//struct v4l2_frame_buffer frames[PREVIEW_FRAMES_NUM];
+//struct v4l2_frame_buffer video_frames[VIDEO_FRAMES_NUM];
+
+//pthread_t frame_thread;
+
+/* Frame-timeout callback: flags a camera-frame failure so code polling
+ * camframe_status can detect it and abort. */
+void test_app_camframe_timeout_callback(void)
+{
+ camframe_status = -1;
+}
+
+/*===========================================================================
+ * FUNCTION - keypress_to_event -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+/*===========================================================================
+ * FUNCTION - keypress_to_event -
+ *
+ * DESCRIPTION: Map a raw key press to a menu event code.
+ *   Letters (either case) map to 1..26 ('a'/'A' -> 1); digits '1'..'9'
+ *   map to BASE_OFFSET_NUM..BASE_OFFSET_NUM+8.  Any other key yields
+ *   INVALID_KEY_PRESS.
+ *   (Fixed: removed a dead store of the raw keypress that was
+ *   immediately overwritten in the digit branch.)
+ *==========================================================================*/
+int keypress_to_event(char keypress)
+{
+    char out_buf = INVALID_KEY_PRESS;
+    if ((keypress >= 'A' && keypress <= 'Z') ||
+        (keypress >= 'a' && keypress <= 'z')) {
+        /* Case-fold, then map 'a'..'z' onto 1..26. */
+        out_buf = tolower(keypress);
+        out_buf = out_buf - 'a' + 1;
+    } else if (keypress >= '1' && keypress <= '9') {
+        out_buf = keypress - '1' + BASE_OFFSET_NUM;
+    }
+    return out_buf;
+}
+
+/*===========================================================================
+ * FUNCTION - next_menu -
+ *
+ * DESCRIPTION: Menu state machine.  Translates 'keypress' in the context
+ *   of 'current_menu_id' into an action (*action_id_ptr, plus
+ *   *action_param for parameterized actions) and returns the id of the
+ *   menu to display next.  An invalid key generally keeps the user in
+ *   the current menu.
+ *
+ *   Fixes in this revision:
+ *     - MENU_ID_RECORD was missing a 'break' after its inner switch and
+ *       fell through to the outer default ("menu id is wrong" log).
+ *     - MENU_ID_SATURATIONCHANGE's default branch returned
+ *       MENU_ID_EVCHANGE (copy/paste) instead of staying in the
+ *       saturation menu.
+ *==========================================================================*/
+int next_menu(menu_id_change_t current_menu_id, char keypress, camera_action_t * action_id_ptr, int * action_param)
+{
+    char output_to_event;
+    menu_id_change_t next_menu_id = MENU_ID_INVALID;
+    * action_id_ptr = ACTION_NO_ACTION;
+
+    output_to_event = keypress_to_event(keypress);
+    CDBG("current_menu_id=%d\n",current_menu_id);
+    CDBG("output_to_event=%d\n",output_to_event);
+    switch(current_menu_id) {
+    case MENU_ID_MAIN:
+        switch(output_to_event) {
+        case STOP_CAMERA:
+            * action_id_ptr = ACTION_STOP_CAMERA;
+            CDBG("STOP_CAMERA\n");
+            break;
+
+        case PREVIEW_VIDEO_RESOLUTION:
+            next_menu_id = MENU_ID_PREVIEWVIDEORESOLUTIONCHANGE;
+            CDBG("next_menu_id = MENU_ID_PREVIEWVIDEORESOLUTIONCHANGE = %d\n", next_menu_id);
+            break;
+#if 0
+        case SET_WHITE_BALANCE:
+            next_menu_id = MENU_ID_WHITEBALANCECHANGE;
+            CDBG("next_menu_id = MENU_ID_WHITEBALANCECHANGE = %d\n", next_menu_id);
+            break;
+
+        case SET_EXP_METERING:
+            next_menu_id = MENU_ID_EXPMETERINGCHANGE;
+            CDBG("next_menu_id = MENU_ID_EXPMETERINGCHANGE = %d\n", next_menu_id);
+            break;
+
+        case GET_CTRL_VALUE:
+            next_menu_id = MENU_ID_GET_CTRL_VALUE;
+            CDBG("next_menu_id = MENU_ID_GET_CTRL_VALUE = %d\n", next_menu_id);
+            break;
+
+        case BRIGHTNESS_GOTO_SUBMENU:
+            next_menu_id = MENU_ID_BRIGHTNESSCHANGE;
+            CDBG("next_menu_id = MENU_ID_BRIGHTNESSCHANGE = %d\n", next_menu_id);
+            break;
+
+        case CONTRAST_GOTO_SUBMENU:
+            next_menu_id = MENU_ID_CONTRASTCHANGE;
+            break;
+
+        case EV_GOTO_SUBMENU:
+            next_menu_id = MENU_ID_EVCHANGE;
+            break;
+
+        case SATURATION_GOTO_SUBMENU:
+            next_menu_id = MENU_ID_SATURATIONCHANGE;
+            break;
+
+        case TOGGLE_AFR:
+            * action_id_ptr = ACTION_TOGGLE_AFR;
+            CDBG("next_menu_id = MENU_ID_TOGGLEAFR = %d\n", next_menu_id);
+            break;
+
+        case SET_ISO:
+            next_menu_id = MENU_ID_ISOCHANGE;
+            CDBG("next_menu_id = MENU_ID_ISOCHANGE = %d\n", next_menu_id);
+            break;
+
+        case SET_ZOOM:
+            next_menu_id = MENU_ID_ZOOMCHANGE;
+            CDBG("next_menu_id = MENU_ID_ZOOMCHANGE = %d\n", next_menu_id);
+            break;
+
+        case SET_SHARPNESS:
+            next_menu_id = MENU_ID_SHARPNESSCHANGE;
+            CDBG("next_menu_id = MENU_ID_SHARPNESSCHANGE = %d\n", next_menu_id);
+            break;
+#endif
+        case TAKE_YUV_SNAPSHOT:
+            * action_id_ptr = ACTION_TAKE_YUV_SNAPSHOT;
+            CDBG("Taking YUV snapshot\n");
+            break;
+
+        case TAKE_RAW_SNAPSHOT:
+            * action_id_ptr = ACTION_TAKE_RAW_SNAPSHOT;
+            CDBG("Taking RAW snapshot\n");
+            break;
+        case START_RECORDING:
+            *action_id_ptr = ACTION_START_RECORDING;
+            next_menu_id = MENU_ID_RECORD;
+            CDBG("Start recording\n");
+            break;
+        /*case STOP_RECORDING:
+            * action_id_ptr = ACTION_STOP_RECORDING;
+            CDBG("Stop recording\n");
+            break;*/
+        case SWITCH_CAMERA:
+            next_menu_id = MENU_ID_SWITCHCAMERA;
+            CDBG("SWitch Camera\n");
+            break;
+        case TAKE_ZSL_SNAPSHOT:
+            * action_id_ptr = ACTION_TAKE_ZSL_SNAPSHOT;
+            CDBG("Taking ZSL snapshot\n");
+            break;
+        case START_RDI:
+            * action_id_ptr = ACTION_START_RDI;
+            break;
+        case STOP_RDI:
+            * action_id_ptr = ACTION_STOP_RDI;
+            break;
+        default:
+            next_menu_id = MENU_ID_MAIN;
+            CDBG("next_menu_id = MENU_ID_MAIN = %d\n", next_menu_id);
+            break;
+        }
+        break;
+
+    case MENU_ID_PREVIEWVIDEORESOLUTIONCHANGE:
+        printf("MENU_ID_PREVIEWVIDEORESOLUTIONCHANGE\n");
+        * action_id_ptr = ACTION_PREVIEW_VIDEO_RESOLUTION;
+        if (output_to_event > RESOLUTION_PREVIEW_VIDEO_MAX ||
+            output_to_event < RESOLUTION_MIN) {
+            next_menu_id = current_menu_id;
+        }
+        else {
+            next_menu_id = MENU_ID_MAIN;
+            * action_param = output_to_event;
+        }
+        break;
+
+    case MENU_ID_WHITEBALANCECHANGE:
+        printf("MENU_ID_WHITEBALANCECHANGE\n");
+        * action_id_ptr = ACTION_SET_WHITE_BALANCE;
+        if (output_to_event > 0 &&
+            output_to_event <= sizeof(white_balance_tbl)/sizeof(white_balance_tbl[0])) {
+            next_menu_id = MENU_ID_MAIN;
+            * action_param = output_to_event;
+        }
+        else {
+            next_menu_id = current_menu_id;
+        }
+        break;
+
+    case MENU_ID_EXPMETERINGCHANGE:
+        printf("MENU_ID_EXPMETERINGCHANGE\n");
+        * action_id_ptr = ACTION_SET_EXP_METERING;
+        if (output_to_event > 0 &&
+            output_to_event <= sizeof(exp_metering_tbl)/sizeof(exp_metering_tbl[0])) {
+            next_menu_id = MENU_ID_MAIN;
+            * action_param = output_to_event;
+        }
+        else {
+            next_menu_id = current_menu_id;
+        }
+        break;
+
+    case MENU_ID_GET_CTRL_VALUE:
+        printf("MENU_ID_GET_CTRL_VALUE\n");
+        * action_id_ptr = ACTION_GET_CTRL_VALUE;
+        if (output_to_event > 0 &&
+            output_to_event <= sizeof(get_ctrl_tbl)/sizeof(get_ctrl_tbl[0])) {
+            next_menu_id = MENU_ID_MAIN;
+            * action_param = output_to_event;
+        }
+        else {
+            next_menu_id = current_menu_id;
+        }
+        break;
+
+    case MENU_ID_BRIGHTNESSCHANGE:
+        switch (output_to_event) {
+        case INC_BRIGHTNESS:
+            * action_id_ptr = ACTION_BRIGHTNESS_INCREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+
+        case DEC_BRIGHTNESS:
+            * action_id_ptr = ACTION_BRIGHTNESS_DECREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+
+        default:
+            next_menu_id = MENU_ID_BRIGHTNESSCHANGE;
+            break;
+        }
+        break;
+
+    case MENU_ID_CONTRASTCHANGE:
+        switch (output_to_event) {
+        case INC_CONTRAST:
+            * action_id_ptr = ACTION_CONTRAST_INCREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+
+        case DEC_CONTRAST:
+            * action_id_ptr = ACTION_CONTRAST_DECREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+
+        default:
+            next_menu_id = MENU_ID_CONTRASTCHANGE;
+            break;
+        }
+        break;
+
+    case MENU_ID_EVCHANGE:
+        switch (output_to_event) {
+        case INCREASE_EV:
+            * action_id_ptr = ACTION_EV_INCREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+
+        case DECREASE_EV:
+            * action_id_ptr = ACTION_EV_DECREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+
+        default:
+            next_menu_id = MENU_ID_EVCHANGE;
+            break;
+        }
+        break;
+
+    case MENU_ID_SATURATIONCHANGE:
+        switch (output_to_event) {
+        case INC_SATURATION:
+            * action_id_ptr = ACTION_SATURATION_INCREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+
+        case DEC_SATURATION:
+            * action_id_ptr = ACTION_SATURATION_DECREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+
+        default:
+            /* BUGFIX: was MENU_ID_EVCHANGE (copy/paste from the EV case);
+             * an invalid key should keep the user in this menu. */
+            next_menu_id = MENU_ID_SATURATIONCHANGE;
+            break;
+        }
+        break;
+
+    case MENU_ID_ISOCHANGE:
+        printf("MENU_ID_ISOCHANGE\n");
+        * action_id_ptr = ACTION_SET_ISO;
+        if (output_to_event > 0 &&
+            output_to_event <= sizeof(iso_tbl)/sizeof(iso_tbl[0])) {
+            next_menu_id = MENU_ID_MAIN;
+            * action_param = output_to_event;
+        } else {
+            next_menu_id = current_menu_id;
+        }
+        break;
+
+    case MENU_ID_ZOOMCHANGE:
+        * action_id_ptr = ACTION_SET_ZOOM;
+        if (output_to_event > 0 &&
+            output_to_event <= sizeof(zoom_tbl)/sizeof(zoom_tbl[0])) {
+            next_menu_id = MENU_ID_MAIN;
+            * action_param = output_to_event;
+        } else {
+            next_menu_id = current_menu_id;
+        }
+        break;
+
+    case MENU_ID_SHARPNESSCHANGE:
+        switch (output_to_event) {
+        case INC_SHARPNESS:
+            * action_id_ptr = ACTION_SHARPNESS_INCREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+        case DEC_SHARPNESS:
+            * action_id_ptr = ACTION_SHARPNESS_DECREASE;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+        default:
+            next_menu_id = MENU_ID_SHARPNESSCHANGE;
+            break;
+        }
+        break;
+    case MENU_ID_SWITCHCAMERA:
+        * action_id_ptr = ACTION_SWITCH_CAMERA;
+        /* NOTE(review): sibling menus use '> 0'; '>= 0' also accepts 0
+         * here -- confirm whether param 0 is meaningful for switching. */
+        if (output_to_event >= 0 &&
+            output_to_event <= sizeof(cam_tbl)/sizeof(cam_tbl[0])) {
+            next_menu_id = MENU_ID_MAIN;
+            * action_param = output_to_event;
+        } else {
+            next_menu_id = current_menu_id;
+        }
+        break;
+    case MENU_ID_RECORD:
+        switch (output_to_event) {
+        case LIVE_SNAPSHOT_MENU:
+            * action_id_ptr = ACTION_TAKE_LIVE_SNAPSHOT;
+            next_menu_id = MENU_ID_RECORD;
+            break;
+
+        default:               /* any other key behaves like stop-recording */
+        case STOP_RECORDING_MENU:
+            * action_id_ptr = ACTION_STOP_RECORDING;
+            next_menu_id = MENU_ID_MAIN;
+            break;
+        }
+        /* BUGFIX: this break was missing, so MENU_ID_RECORD fell through
+         * to the outer default and logged "menu id is wrong". */
+        break;
+    default:
+        CDBG("menu id is wrong: %d\n", current_menu_id);
+        break;
+    }
+
+    return next_menu_id;
+}
+
+/*===========================================================================
+ * FUNCTION - print_menu_preview_video -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+/*===========================================================================
+ * FUNCTION - print_menu_preview_video -
+ *
+ * DESCRIPTION: Print the top-level menu from camera_main_menu_tbl.
+ *   Entries are labelled 'A', 'B', ...; after BASE_OFFSET letter entries
+ *   the labels restart at '1'.
+ *==========================================================================*/
+static void print_menu_preview_video(void) {
+ unsigned int idx;
+ char label = 'A';
+
+ printf("\n");
+ printf("===========================================\n");
+ printf(" Camera is in preview/video mode now \n");
+ printf("===========================================\n\n");
+
+ for (idx = 0; idx < sizeof(camera_main_menu_tbl)/sizeof(camera_main_menu_tbl[0]); idx++) {
+ if (idx == BASE_OFFSET)
+ label = '1';
+ printf("%c. %s\n", label, camera_main_menu_tbl[idx].menu_name);
+ label++;
+ }
+
+ printf("\nPlease enter your choice: ");
+ return;
+}
+
+/* Print the preview/video resolution submenu from
+ * preview_video_dimension_tbl, labelled 'A', 'B', ... in table order. */
+static void camera_preview_video_resolution_change_tbl(void) {
+ unsigned int n;
+ const unsigned int count = sizeof(preview_video_dimension_tbl) /
+ sizeof(preview_video_dimension_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in preview/video resolution mode \n");
+ printf("==========================================================\n\n");
+
+ for (n = 0; n < count; n++)
+ printf("%c. %s\n", (char)('A' + n),
+ preview_video_dimension_tbl[n].str_name);
+
+ printf("\nPlease enter your choice for Preview/Video Resolution: ");
+ return;
+}
+
+/* Print the white-balance submenu header and prompt.
+ * NOTE(review): the per-entry printf below is commented out, so the menu
+ * currently lists no options (the loop only advances the label) -- confirm
+ * whether this is intentional or the wb_name field was removed. */
+static void camera_preview_video_wb_change_tbl(void) {
+ unsigned int i;
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in white balance change mode \n");
+ printf("==========================================================\n\n");
+
+ char submenuNum = 'A';
+ for (i = 0 ; i < sizeof(white_balance_tbl) /
+ sizeof(white_balance_tbl[0]); i++) {
+ //printf("%c. %s\n", submenuNum, white_balance_tbl[i].wb_name);
+ submenuNum++;
+ }
+ printf("\nPlease enter your choice for White Balance modes: ");
+ return;
+}
+
+/* Print the get-control-value submenu from get_ctrl_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_preview_video_get_ctrl_value_tbl(void) {
+ unsigned int k;
+ const unsigned int entries = sizeof(get_ctrl_tbl) / sizeof(get_ctrl_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in get control value mode \n");
+ printf("==========================================================\n\n");
+
+ for (k = 0; k < entries; k++)
+ printf("%c. %s\n", (char)('A' + k), get_ctrl_tbl[k].get_ctrl_name);
+
+ printf("\nPlease enter your choice for control value you want to get: ");
+ return;
+}
+
+/* Print the exposure-metering submenu from exp_metering_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_preview_video_exp_metering_change_tbl(void) {
+ unsigned int pos;
+ const unsigned int count = sizeof(exp_metering_tbl) /
+ sizeof(exp_metering_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in exposure metering change mode \n");
+ printf("==========================================================\n\n");
+
+ for (pos = 0; pos < count; pos++)
+ printf("%c. %s\n", (char)('A' + pos),
+ exp_metering_tbl[pos].exp_metering_name);
+
+ printf("\nPlease enter your choice for exposure metering modes: ");
+ return;
+}
+
+/* Print the contrast submenu from contrast_change_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_contrast_change_tbl(void) {
+ unsigned int n;
+ const unsigned int total = sizeof(contrast_change_tbl) /
+ sizeof(contrast_change_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in change contrast resolution mode \n");
+ printf("==========================================================\n\n");
+
+ for (n = 0; n < total; n++)
+ printf("%c. %s\n", (char)('A' + n),
+ contrast_change_tbl[n].contrast_name);
+
+ printf("\nPlease enter your choice for contrast Change: ");
+ return;
+}
+
+/* Print the EV submenu from camera_EV_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_EV_change_tbl(void) {
+ unsigned int k;
+ const unsigned int entries = sizeof(camera_EV_tbl) / sizeof(camera_EV_tbl[0]);
+
+ printf("\n");
+ printf("===========================================\n");
+ printf(" Camera is in EV change mode now \n");
+ printf("===========================================\n\n");
+
+ for (k = 0; k < entries; k++)
+ printf("%c. %s\n", (char)('A' + k), camera_EV_tbl[k].EV_name);
+
+ printf("\nPlease enter your choice for EV changes: ");
+ return;
+}
+
+/* Show the zoom submenu.  The part that reads the current zoom level via
+ * VIDIOC_G_CTRL and prints the options is compiled out ("#if 0 / TBD"),
+ * so today this only zero-initializes a v4l2_control and returns. */
+static void camera_preview_video_zoom_change_tbl(void) {
+ unsigned int i; /* used only inside the disabled block below */
+ struct v4l2_control ctrl;
+
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_ZOOM_ABSOLUTE;
+#if 0 /* TBD */
+ if (ioctl(camfd, VIDIOC_G_CTRL, &ctrl) >= 0) {
+ zoom_level = ctrl.value;
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in zoom change mode: %d, [%d..%d] \n",
+ ctrl.value, zoom_queryctrl.minimum, zoom_queryctrl.maximum);
+ printf("==========================================================\n\n");
+
+ char submenuNum = 'A';
+ for (i = 0 ; i < sizeof(zoom_tbl) /
+ sizeof(zoom_tbl[0]); i++) {
+ printf("%c. %s\n", submenuNum, zoom_tbl[i].zoom_direction_name);
+ submenuNum++;
+ }
+ printf("\nPlease enter your choice for zoom change direction: ");
+ } else {
+ printf("\nVIDIOC_G_CTRL error: %d\n", errno);
+ }
+#endif /* TBD */
+ return;
+}
+
+/* Print the brightness submenu from brightness_change_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_brightness_change_tbl(void) {
+ unsigned int idx;
+ const unsigned int count = sizeof(brightness_change_tbl) /
+ sizeof(brightness_change_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in change brightness mode \n");
+ printf("==========================================================\n\n");
+
+ for (idx = 0; idx < count; idx++)
+ printf("%c. %s\n", (char)('A' + idx),
+ brightness_change_tbl[idx].brightness_name);
+
+ printf("\nPlease enter your choice for Brightness Change: ");
+ return;
+}
+
+/* Print the saturation submenu from camera_saturation_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_saturation_change_tbl(void) {
+ unsigned int n;
+ const unsigned int total = sizeof(camera_saturation_tbl) /
+ sizeof(camera_saturation_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in change saturation mode \n");
+ printf("==========================================================\n\n");
+
+ for (n = 0; n < total; n++)
+ printf("%c. %s\n", (char)('A' + n),
+ camera_saturation_tbl[n].saturation_name);
+
+ printf("\nPlease enter your choice for Saturation Change: ");
+ return;
+}
+
+/* Look up cs_id in preview_video_dimension_tbl; on a match store the
+ * dimensions into *width/*height and return the resolution's short name.
+ * Returns NULL (outputs untouched) when cs_id is not in the table. */
+char * set_preview_video_dimension_tbl(Camera_Resolution cs_id, uint16_t * width, uint16_t * height)
+{
+ unsigned int idx;
+ const unsigned int entries = sizeof(preview_video_dimension_tbl) /
+ sizeof(preview_video_dimension_tbl[0]);
+
+ for (idx = 0; idx < entries; idx++) {
+ if (preview_video_dimension_tbl[idx].cs_id == cs_id) {
+ *width = preview_video_dimension_tbl[idx].width;
+ *height = preview_video_dimension_tbl[idx].height;
+ return preview_video_dimension_tbl[idx].name;
+ }
+ }
+ return NULL;
+}
+
+/* Print the ISO submenu from iso_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_preview_video_iso_change_tbl(void) {
+ unsigned int pos;
+ const unsigned int count = sizeof(iso_tbl) / sizeof(iso_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in ISO change mode \n");
+ printf("==========================================================\n\n");
+
+ for (pos = 0; pos < count; pos++)
+ printf("%c. %s\n", (char)('A' + pos), iso_tbl[pos].iso_modes_name);
+
+ printf("\nPlease enter your choice for iso modes: ");
+ return;
+}
+
+/* Print the sharpness submenu from camera_sharpness_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_preview_video_sharpness_change_tbl(void) {
+ unsigned int k;
+ const unsigned int entries = sizeof(camera_sharpness_tbl) /
+ sizeof(camera_sharpness_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in sharpness change mode \n");
+ printf("==========================================================\n\n");
+
+ for (k = 0; k < entries; k++)
+ printf("%c. %s\n", (char)('A' + k), camera_sharpness_tbl[k].sharpness_name);
+
+ printf("\nPlease enter your choice for sharpness modes: ");
+ return;
+}
+
+/* Print the recording submenu (live snapshot / stop recording) from
+ * record_tbl, labelled 'A', 'B', ... in table order. */
+static void camera_record_tbl(void) {
+ unsigned int n;
+ const unsigned int total = sizeof(record_tbl) / sizeof(record_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in record mode \n");
+ printf("==========================================================\n\n");
+
+ for (n = 0; n < total; n++)
+ printf("%c. %s\n", (char)('A' + n), record_tbl[n].act_name);
+
+ printf("\nPlease enter your choice: ");
+ return;
+}
+
+/* Print the camera-switch submenu from cam_tbl,
+ * labelled 'A', 'B', ... in table order. */
+static void camera_switch_tbl(void) {
+ unsigned int idx;
+ const unsigned int count = sizeof(cam_tbl) / sizeof(cam_tbl[0]);
+
+ printf("\n");
+ printf("==========================================================\n");
+ printf(" Camera is in switch camera mode \n");
+ printf("==========================================================\n\n");
+
+ for (idx = 0; idx < count; idx++)
+ printf("%c. %s\n", (char)('A' + idx), cam_tbl[idx].cam_name);
+
+ printf("\nPlease enter your choice for camera modes: ");
+ return;
+}
+/*===========================================================================
+ * FUNCTION - increase_contrast -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+/* Bump the cached global 'contrast' one step, clamping at
+ * CAMERA_MAX_CONTRAST.  The V4L2 apply path (VIDIOC_QUERYCTRL /
+ * VIDIOC_S_CTRL) is still disabled below ("#if 0 / TBD"), so only the
+ * cached value changes.  Always returns 0. */
+int increase_contrast (void) {
+ ++contrast;
+ if (contrast > CAMERA_MAX_CONTRAST) {
+ contrast = CAMERA_MAX_CONTRAST;
+ printf("Reached max CONTRAST. \n");
+ } else
+ printf("Increase CONTRAST to %d\n", contrast);
+
+ /*intrfcCtrl.setContrast(camfd, contrast);*/
+
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_CONTRAST;
+#if 0 /* TBD */
+ if (-1 == ioctl (camfd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ if (errno != EINVAL) {
+ perror ("VIDIOC_QUERYCTRL");
+ exit (EXIT_FAILURE);
+ } else {
+ printf ("V4L2_CID_contrast is not supported\n");
+ }
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ printf ("V4L2_CID_contrast is not supported\n");
+ } else {
+ memset (&control, 0, sizeof (control));
+ control.id = V4L2_CID_CONTRAST;
+ /* Increasing the contrast (comment fixed: previously said "Decreasing") */
+ control.value = contrast;
+
+ // if (-1 == ioctl (camfd, VIDIOC_S_CTRL, &control)) {
+ // perror ("VIDIOC_S_CTRL");
+ // return -1;
+ // }
+ }
+#endif /* TBD */
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_contrast -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+/* Lower the cached global 'contrast' one step, clamping at
+ * CAMERA_MIN_CONTRAST.  The V4L2 apply path is disabled below
+ * ("#if 0 / TBD"), so only the cached value changes.  Always returns 0. */
+int decrease_contrast (void) {
+ --contrast;
+ if (contrast < CAMERA_MIN_CONTRAST) {
+ contrast = CAMERA_MIN_CONTRAST;
+ printf("Reached min CONTRAST. \n");
+ } else
+ printf("Decrease CONTRAST to %d\n", contrast);
+
+ /*intrfcCtrl.setContrast(camfd, contrast);*/
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_CONTRAST;
+#if 0 /* TBD */
+ if (-1 == ioctl (camfd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ if (errno != EINVAL) {
+ perror ("VIDIOC_QUERYCTRL");
+ exit (EXIT_FAILURE);
+ } else {
+ printf ("V4L2_CID_contrast is not supported\n");
+ }
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ printf ("V4L2_CID_contrast is not supported\n");
+ } else {
+ memset (&control, 0, sizeof (control));
+ control.id = V4L2_CID_CONTRAST;
+ /* Decreasing the contrast */
+ control.value = contrast;
+
+ // if (-1 == ioctl (camfd, VIDIOC_S_CTRL, &control)) {
+ // perror ("VIDIOC_S_CTRL");
+ // return -1;
+ // }
+ }
+#endif /* TBD */
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_brightness -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+/* Lower the cached global 'brightness' by CAMERA_BRIGHTNESS_STEP,
+ * clamping at CAMERA_MIN_BRIGHTNESS.  The V4L2 apply path is disabled
+ * below ("#if 0 / TBD").  Always returns 0 while the TBD block is off. */
+int decrease_brightness (void) {
+ brightness -= CAMERA_BRIGHTNESS_STEP;
+ if (brightness < CAMERA_MIN_BRIGHTNESS) {
+ brightness = CAMERA_MIN_BRIGHTNESS;
+ printf("Reached min BRIGHTNESS. \n");
+ } else
+ printf("Decrease BRIGHTNESS to %d\n", brightness);
+
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_BRIGHTNESS;
+#if 0 /* TBD */
+ if (-1 == ioctl (camfd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ if (errno != EINVAL) {
+ perror ("VIDIOC_QUERYCTRL");
+ exit (EXIT_FAILURE);
+ } else {
+ printf ("V4L2_CID_BRIGHTNESS is not supported\n");
+ }
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ printf ("V4L2_CID_BRIGHTNESS is not supported\n");
+ } else {
+ memset (&control, 0, sizeof (control));
+ control.id = V4L2_CID_BRIGHTNESS;
+ /* Decreasing the Brightness */
+ control.value = brightness;
+
+ if (-1 == ioctl (camfd, VIDIOC_S_CTRL, &control)) {
+ perror ("VIDIOC_S_CTRL");
+ return -1;
+ }
+ }
+#endif /* TBD */
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - increase_brightness -
+ *
+ * DESCRIPTION:
+ * ===========================================================================*/
+/* Raise the cached global 'brightness' by CAMERA_BRIGHTNESS_STEP,
+ * clamping at CAMERA_MAX_BRIGHTNESS.  The V4L2 apply path is disabled
+ * below ("#if 0 / TBD").  Always returns 0 while the TBD block is off. */
+int increase_brightness (void) {
+ brightness += CAMERA_BRIGHTNESS_STEP;
+ if (brightness > CAMERA_MAX_BRIGHTNESS) {
+ brightness = CAMERA_MAX_BRIGHTNESS;
+ printf("Reached max BRIGHTNESS. \n");
+ } else
+ printf("Increase BRIGHTNESS to %d\n", brightness);
+
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_BRIGHTNESS;
+#if 0 /* TBD */
+ if (-1 == ioctl (camfd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ if (errno != EINVAL) {
+ perror ("VIDIOC_QUERYCTRL");
+ exit (EXIT_FAILURE);
+ } else {
+ printf ("V4L2_CID_BRIGHTNESS is not supported\n");
+ }
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ printf ("V4L2_CID_BRIGHTNESS is not supported\n");
+ } else {
+ memset (&control, 0, sizeof (control));
+ control.id = V4L2_CID_BRIGHTNESS;
+ /* Increasing the Brightness */
+ control.value = brightness;
+
+ if (-1 == ioctl (camfd, VIDIOC_S_CTRL, &control)) {
+ perror ("VIDIOC_S_CTRL");
+ return -1;
+ }
+ }
+#endif /* TBD */
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - increase_EV -
+ *
+ * DESCRIPTION: step the global 'ev_num' up by one, clamped to +12, and pack
+ * the exposure value as (ev_num << 16) | 6 for the (disabled) V4L2
+ * VIDIOC_S_CTRL path below. Always returns 0 while the ioctl path is
+ * compiled out (#if 0 / TBD).
+ * ===========================================================================*/
+int increase_EV (void) {
+ int32_t ev = 0;
+ if (++ev_num <= 12) {
+ printf("Increase EV to %d\n", ev_num);
+ } else {
+ /* BUGFIX: clamp ev_num so repeated presses cannot push it past the
+ * supported range; previously it grew without bound and 'ev' was
+ * assigned the raw counter instead of the packed encoding. */
+ ev_num = 12;
+ printf("Reached max EV. \n");
+ }
+ ev = (ev_num << 16) | 6;
+
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_EXPOSURE;
+#if 0 /* TBD */
+ if (-1 == ioctl (camfd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ if (errno != EINVAL) {
+ perror ("VIDIOC_QUERYCTRL");
+ exit (EXIT_FAILURE);
+ } else {
+ printf ("V4L2_CID_EXPOSURE is not supported\n");
+ }
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ printf ("V4L2_CID_EXPOSURE is not supported\n");
+ } else {
+ memset (&control, 0, sizeof (control));
+ control.id = V4L2_CID_EXPOSURE;
+ /* Increasing the EV*/
+ control.value = ev;
+
+ if (-1 == ioctl (camfd, VIDIOC_S_CTRL, &control)) {
+ perror ("VIDIOC_S_CTRL");
+ return -1;
+ }
+ }
+#endif /* TBD */
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_EV -
+ *
+ * DESCRIPTION: step the global 'ev_num' down by one, clamped to -12, and
+ * pack the exposure value as (ev_num << 16) | 6 for the (disabled) V4L2
+ * VIDIOC_S_CTRL path below. Always returns 0 while the ioctl path is
+ * compiled out (#if 0 / TBD).
+ * ===========================================================================*/
+int decrease_EV (void) {
+ int32_t ev = 0;
+ /* BUGFIX: use >= so -12 is reachable, mirroring increase_EV's <= 12,
+ * and clamp ev_num so it cannot drift below the supported range. */
+ if (--ev_num >= -12) {
+ printf("Decrease EV to %d\n", ev_num);
+ } else {
+ ev_num = -12;
+ printf("Reached min EV. \n");
+ }
+ ev = (ev_num << 16) | 6;
+
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_EXPOSURE;
+#if 0 /* TBD */
+ if (-1 == ioctl (camfd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ if (errno != EINVAL) {
+ perror ("VIDIOC_QUERYCTRL");
+ exit (EXIT_FAILURE);
+ } else {
+ printf ("V4L2_CID_EXPOSURE is not supported\n");
+ }
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ printf ("V4L2_CID_EXPOSURE is not supported\n");
+ } else {
+ memset (&control, 0, sizeof (control));
+ control.id = V4L2_CID_EXPOSURE;
+ /* Decreasing the EV */
+ control.value = ev;
+
+ if (-1 == ioctl (camfd, VIDIOC_S_CTRL, &control)) {
+ perror ("VIDIOC_S_CTRL");
+ return -1;
+ }
+ }
+#endif /* TBD */
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - increase_saturation -
+ *
+ * DESCRIPTION: step the global 'saturation' up by one, clamping at
+ * CAMERA_MAX_SATURATION. The VIDIOC_S_CTRL path below is compiled out
+ * (#if 0 / TBD), so currently only the global and the printout change.
+ * Always returns 0 while the ioctl path is disabled.
+ * ===========================================================================*/
+int increase_saturation (void) {
+ ++saturation;
+ if (saturation > CAMERA_MAX_SATURATION) {
+ saturation = CAMERA_MAX_SATURATION;
+ printf("Reached max saturation. \n");
+ } else
+ printf("Increase saturation to %d\n", saturation);
+
+ /*intrfcCtrl.setContrast(camfd, contrast);*/
+
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_SATURATION;
+#if 0 /* TBD */
+ if (-1 == ioctl (camfd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ if (errno != EINVAL) {
+ perror ("VIDIOC_QUERYCTRL");
+ exit (EXIT_FAILURE);
+ } else {
+ printf ("V4L2_CID_saturation is not supported\n");
+ }
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ printf ("V4L2_CID_saturation is not supported\n");
+ } else {
+ memset (&control, 0, sizeof (control));
+ control.id = V4L2_CID_SATURATION;
+ /* Applying the new saturation level */
+ control.value = saturation;
+
+ if (-1 == ioctl (camfd, VIDIOC_S_CTRL, &control)) {
+ perror ("VIDIOC_S_CTRL");
+ return -1;
+ }
+ }
+#endif /* TBD */
+ return 0;
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_saturation -
+ *
+ * DESCRIPTION: step the global 'saturation' down by one, clamping at
+ * CAMERA_MIN_SATURATION. The VIDIOC_S_CTRL path below is compiled out
+ * (#if 0 / TBD), so currently only the global and the printout change.
+ * Always returns 0 while the ioctl path is disabled.
+ * ===========================================================================*/
+int decrease_saturation (void) {
+ --saturation;
+ if (saturation < CAMERA_MIN_SATURATION) {
+ saturation = CAMERA_MIN_SATURATION;
+ printf("Reached min saturation. \n");
+ } else
+ printf("Decrease saturation to %d\n", saturation);
+
+ /*intrfcCtrl.setContrast(camfd, contrast);*/
+ struct v4l2_queryctrl queryctrl;
+ struct v4l2_control control;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_SATURATION;
+#if 0 /* TBD */
+ if (-1 == ioctl (camfd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ if (errno != EINVAL) {
+ perror ("VIDIOC_QUERYCTRL");
+ exit (EXIT_FAILURE);
+ } else {
+ printf ("V4L2_CID_saturation is not supported\n");
+ }
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ printf ("V4L2_CID_saturation is not supported\n");
+ } else {
+ memset (&control, 0, sizeof (control));
+ control.id = V4L2_CID_SATURATION;
+ /* Applying the new saturation level */
+ control.value = saturation;
+
+ if (-1 == ioctl (camfd, VIDIOC_S_CTRL, &control)) {
+ perror ("VIDIOC_S_CTRL");
+ return -1;
+ }
+ }
+#endif /* TBD */
+ return 0;
+}
+
+/* Trigger a regular (non-ZSL) YUV still capture on camera 'cam_id'.
+ * Thin wrapper over mm_app_take_picture(); returns its rc (0 on success). */
+int takePicture_yuv(int cam_id)
+{
+ int rc = 0;
+ CDBG("%s:BEGIN\n", __func__);
+ if(0 != (rc = mm_app_take_picture(cam_id))) {
+ CDBG_ERROR("%s: mm_app_take_picture() err=%d\n", __func__, rc);
+ }
+ return rc;
+}
+
+/* Trigger a ZSL (zero-shutter-lag) still capture on camera 'cam_id'.
+ * Thin wrapper over mm_app_take_zsl(); returns its rc (0 on success). */
+int takePicture_zsl(int cam_id)
+{
+ int rc = 0;
+ CDBG("%s:BEGIN\n", __func__);
+ if(0 != (rc = mm_app_take_zsl(cam_id))) {
+ /* BUGFIX: error text previously named mm_app_take_picture(). */
+ CDBG_ERROR("%s: mm_app_take_zsl() err=%d\n", __func__, rc);
+ }
+ return rc;
+}
+
+/* Trigger a live snapshot (still capture while recording) on camera
+ * 'cam_id'. Thin wrapper over mm_app_take_live_snapshot(); returns its rc
+ * (0 on success). */
+int takePicture_live(int cam_id)
+{
+ int rc = 0;
+ CDBG("%s:BEGIN\n", __func__);
+ if(0 != (rc = mm_app_take_live_snapshot(cam_id))) {
+ /* BUGFIX: error text previously named mm_app_take_picture(). */
+ CDBG_ERROR("%s: mm_app_take_live_snapshot() err=%d\n", __func__, rc);
+ }
+ return rc;
+}
+
+/* Trigger a RAW still capture on camera 'cam_id'. Thin wrapper over
+ * mm_app_take_raw_picture(); returns its rc (0 on success).
+ * NOTE(review): logs failure with CDBG rather than CDBG_ERROR, unlike the
+ * sibling takePicture_* wrappers — confirm whether that is intentional. */
+int takePicture_raw(int cam_id)
+{
+ int rc = 0;
+ CDBG("%s:BEGIN\n", __func__);
+ if(0 != (rc = mm_app_take_raw_picture(cam_id))) {
+ CDBG("%s: mm_app_take_raw_picture() err=%d\n", __func__, rc);
+ }
+ return rc;
+}
+/* Push the preview/video/display dimensions to camera 'cam_id'.
+ * When no resolution has been picked from the menu yet
+ * (preview_video_resolution_flag == 0) the defaults from
+ * mm_app_set_dim_def() are used; otherwise the user-selected width/height
+ * from the global input_display are applied to video and display alike.
+ * Returns the rc of mm_app_set_dim().
+ * NOTE(review): only video_width is rounded with CEILING32; video_height is
+ * not — confirm whether the hardware requires height alignment too. */
+int system_dimension_set(int cam_id)
+{
+ static cam_ctrl_dimension_t dim;
+ int rc = 0;
+
+ if (preview_video_resolution_flag == 0) {
+ mm_app_set_dim_def(&dim);
+ } else {
+ dim.video_width = input_display.user_input_display_width;
+ dim.video_width = CEILING32(dim.video_width);
+ dim.video_height = input_display.user_input_display_height;
+ dim.orig_video_width = dim.video_width;
+ dim.orig_video_height = dim.video_height;
+ dim.display_width = dim.video_width;
+ dim.display_height = dim.video_height;
+ }
+ rc = mm_app_set_dim(cam_id, &dim);
+ return rc;
+}
+
+/*int run_test_harness()
+{
+ int good_test_cnt = 0;
+
+ rc = run_test_1();
+ if(rc < 0)
+ CDBG_EROR("%s: run_test_1 err = %d", __func__, rc);
+ rc = run_test_2();
+}*/
+/*===========================================================================
+ * FUNCTION - main -
+ *
+ * DESCRIPTION: entry point of the camera test app. Parses -t (unit tests),
+ * -d (dual-camera tests) and -h, loads the camera HAL, initializes the app
+ * layer, opens the primary camera, then loops in submain() until the user
+ * quits, finally closing the camera and printing profiling timestamps.
+ *==========================================================================*/
+int main(int argc, char **argv)
+{
+ int keep_on_going = 1;
+ int c, rc = 0, tmp_fd;
+ int run_tc = 0;
+ int run_dual_tc = 0;
+ /* NOTE(review): tmp_fd and v4l2_cap are never used in this function. */
+ struct v4l2_capability v4l2_cap;
+
+ /* get v4l2 params - memory type etc */
+ while ((c = getopt(argc, argv, "tdh")) != -1) {
+ //printf("usage: %s [-m] [-u] [-o]\n", argv[1]);
+ switch (c) {
+#if 0
+ case 'm':
+ memoryType = V4L2_MEMORY_MMAP;
+ break;
+
+ case 'o':
+ /*use_overlay_fb_display_driver();*/
+ break;
+ case 'u':
+ memoryType = V4L2_MEMORY_USERPTR;
+ break;
+#endif
+ case 't':
+ run_tc = 1;
+ break;
+ case 'd':
+ run_dual_tc = 1;
+ break;
+ case 'h':
+ default:
+ /* NOTE(review): help text advertises -m/-u/-o, but only -t/-d/-h
+ * are in the getopt string; the others are compiled out above. */
+ printf("usage: %s [-m] [-u] [-o]\n", argv[0]);
+ printf("-m: V4L2_MEMORY_MMAP. \n");
+ printf("-o: use overlay fb display driver\n");
+ printf("-u: V4L2_MEMORY_USERPTR\n");
+ exit(0);
+ }
+ }
+
+ CDBG("\nCamera Test Application\n");
+
+ struct timeval tdBeforePreviewVideo, tdStopCamera;
+ struct timezone tz;
+
+ //return run_test_harness();
+ /* NOTE(review): on failure this logs "mm_app_init" though the failing
+ * call is mm_app_load_hal(), and rc here is from the failed call itself. */
+ if((rc = mm_app_load_hal())) {
+ CDBG_ERROR("%s:mm_app_init err=%d\n", __func__, rc);
+ exit(-1);
+ }
+ /* we must init mm_app first */
+ if(mm_app_init() != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_init err=%d\n", __func__, rc);
+ exit(-1);
+ }
+
+ if(run_tc) {
+ printf("\tRunning unit test engine only\n");
+ rc = mm_app_unit_test();
+ printf("\tUnit test engine. EXIT(%d)!!!\n", rc);
+ exit(rc);
+ }
+
+ if(run_dual_tc) {
+ printf("\tRunning Dual camera test engine only\n");
+ rc = mm_app_dual_test();
+ printf("\t Dual camera engine. EXIT(%d)!!!\n", rc);
+ exit(rc);
+ }
+
+ gettimeofday(&tdBeforePreviewVideo, &tz);
+
+ CDBG("Profiling: Start Camera timestamp = %ld ms\n",
+ (tdBeforePreviewVideo.tv_sec * 1000) + (tdBeforePreviewVideo.tv_usec/1000));
+
+ /* launch the primary camera in default mode */
+ if( mm_app_open(CAMERA_OPENED)) {
+ CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+ exit(-2);
+ }
+
+ /* main loop doing the work*/
+ do {
+ keep_on_going = submain();
+ } while ( keep_on_going );
+
+ /* Clean up and exit. */
+ CDBG("Exiting the app\n");
+
+/* NOTE(review): this label is never the target of a goto in this function —
+ * likely left over from removed ION setup code. */
+error_ionfd_open:
+ mm_app_close(CAMERA_OPENED);
+
+ gettimeofday(&tdStopCamera, &tz);
+ CDBG("Exiting application\n");
+ CDBG("Profiling: Stop camera end timestamp = %ld ms\n",
+ (tdStopCamera.tv_sec * 1000) + (tdStopCamera.tv_usec/1000));
+
+ return 0;
+}
+
+
+/*===========================================================================
+ * FUNCTION - submain -
+ *
+ * DESCRIPTION: one pass of the interactive menu loop for the currently
+ * open camera: sets dimensions, starts preview, then reads single-key menu
+ * selections from stdin and dispatches the matching camera action until the
+ * user stops the camera, changes resolution, or switches cameras.
+ * Returns 1 when main() should call submain() again (resolution change /
+ * camera switch), 0 to exit the app (stop, error, or preview failure).
+ * ===========================================================================*/
+static int submain()
+{
+ int rc = 0;
+ int back_mainflag = 0;
+ char tc_buf[3];
+ /* NOTE(review): stop_preview, i, tdStopCamera and tz are unused here. */
+ int stop_preview = 1;
+ menu_id_change_t current_menu_id = MENU_ID_MAIN, next_menu_id;
+ camera_action_t action_id;
+ int action_param;
+ int i;
+ int cam_id;
+
+ CDBG("%s:E", __func__);
+ struct timeval tdStopCamera;
+ struct timezone tz;
+
+ cam_id = my_cam_app.cam_open;
+
+ rc = system_dimension_set(cam_id);
+ CDBG("Start Preview");
+ if( 0 != (rc = startPreview(cam_id))) {
+ CDBG("%s: startPreview() err=%d\n", __func__, rc);
+ return 0;
+ }
+
+ do {
+ print_current_menu (current_menu_id);
+ /* size 3: one menu key, a newline, and the terminating NUL */
+ fgets(tc_buf, 3, stdin);
+
+ next_menu_id = next_menu(current_menu_id, tc_buf[0], &action_id, &action_param);
+
+ if (next_menu_id != MENU_ID_INVALID) {
+ current_menu_id = next_menu_id;
+ }
+ if (action_id == ACTION_NO_ACTION) {
+ continue;
+ }
+ /* camframe_status is set by the frame callbacks; -1 means the
+ * preview/video pipeline reported a fatal error */
+ if(camframe_status == -1) {
+ printf("Preview/Video ERROR condition reported Closing Camera APP\n");
+ break;
+ }
+
+ /* dispatch the selected menu action */
+ switch(action_id) {
+ case ACTION_STOP_CAMERA:
+ CDBG("ACTION_STOP_CAMERA \n");
+ stopPreview(cam_id);
+ break;
+
+ case ACTION_PREVIEW_VIDEO_RESOLUTION:
+ back_mainflag = 1;
+ CDBG("Selection for the preview/video resolution change\n");
+ switchRes(cam_id);
+ preview_video_resolution (action_param);
+ break;
+ case ACTION_SET_WHITE_BALANCE:
+ CDBG("Selection for the White Balance changes\n");
+ set_whitebalance(action_param);
+ break;
+
+ case ACTION_SET_EXP_METERING:
+ CDBG("Selection for the Exposure Metering changes\n");
+ set_exp_metering(action_param);
+ break;
+
+ case ACTION_GET_CTRL_VALUE:
+ CDBG("Selection for getting control value\n");
+ get_ctrl_value(action_param);
+ break;
+
+ case ACTION_BRIGHTNESS_INCREASE:
+ printf("Increase brightness\n");
+ increase_brightness();
+ break;
+
+ case ACTION_BRIGHTNESS_DECREASE:
+ printf("Decrease brightness\n");
+ decrease_brightness();
+ break;
+
+ case ACTION_CONTRAST_INCREASE:
+ CDBG("Selection for the contrast increase\n");
+ increase_contrast ();
+ break;
+
+ case ACTION_CONTRAST_DECREASE:
+ CDBG("Selection for the contrast decrease\n");
+ decrease_contrast ();
+ break;
+
+ case ACTION_EV_INCREASE:
+ CDBG("Selection for the EV increase\n");
+ increase_EV ();
+ break;
+
+ case ACTION_EV_DECREASE:
+ CDBG("Selection for the EV decrease\n");
+ decrease_EV ();
+ break;
+
+ case ACTION_SATURATION_INCREASE:
+ CDBG("Selection for the EV increase\n");
+ increase_saturation ();
+ break;
+
+ case ACTION_SATURATION_DECREASE:
+ CDBG("Selection for the EV decrease\n");
+ decrease_saturation ();
+ break;
+
+ case ACTION_TOGGLE_AFR:
+ CDBG("Select for auto frame rate toggling\n");
+ toggle_afr();
+ break;
+
+ case ACTION_SET_ISO:
+ CDBG("Select for ISO changes\n");
+ set_iso(action_param);
+ break;
+
+ case ACTION_SET_ZOOM:
+ CDBG("Selection for the zoom direction changes\n");
+ set_zoom(action_param);
+ break;
+
+ case ACTION_SHARPNESS_INCREASE:
+ CDBG("Selection for sharpness increase\n");
+ increase_sharpness();
+ break;
+
+ case ACTION_SHARPNESS_DECREASE:
+ CDBG("Selection for sharpness decrease\n");
+ decrease_sharpness();
+ break;
+
+ case ACTION_TAKE_YUV_SNAPSHOT:
+ CDBG("Take YUV snapshot\n");
+ if (takePicture_yuv(cam_id) < 0)
+ goto ERROR;
+ break;
+ case ACTION_TAKE_RAW_SNAPSHOT:
+ CDBG("Take YUV snapshot\n");
+ if (takePicture_raw(cam_id) < 0)
+ goto ERROR;
+ break;
+ case ACTION_START_RECORDING:
+ CDBG("Start recording action\n");
+ if (startRecording(cam_id) < 0)
+ goto ERROR;
+ break;
+ case ACTION_STOP_RECORDING:
+ CDBG("Stop recording action\n");
+ if (stopRecording(cam_id) < 0)
+ goto ERROR;
+ break;
+ case ACTION_NO_ACTION:
+ printf("Go back to main menu");
+ break;
+ case ACTION_SWITCH_CAMERA:
+ CDBG("Toggle Camera action\n");
+ back_mainflag = 1;
+ if (switchCamera(action_param - 1) < 0)
+ goto ERROR;
+ break;
+
+ case ACTION_TAKE_ZSL_SNAPSHOT:
+ CDBG("Take ZSL snapshot\n");
+ if (takePicture_zsl(cam_id) < 0)
+ {
+ CDBG("Error");
+ goto ERROR;
+ }
+ break;
+ case ACTION_START_RDI:
+ CDBG("Start RDI Stream\n");
+ startRdi(cam_id);
+ break;
+ case ACTION_STOP_RDI:
+ CDBG("Stop RDI Stream\n");
+ stopRdi(cam_id);
+ break;
+ case ACTION_TAKE_LIVE_SNAPSHOT:
+ CDBG("Take Live snapshot\n");
+ if (takePicture_live(cam_id) < 0)
+ {
+ CDBG("Error");
+ goto ERROR;
+ }
+ break;
+
+ default:
+ printf("\n\n!!!!!WRONG INPUT: %d!!!!\n", action_id);
+ break;
+ }
+
+ /* brief pause so the action's output is visible before re-printing
+ * the menu */
+ usleep(1000 * 1000);
+ CDBG("action_id = %d\n", action_id);
+ camframe_status = 0;
+
+ } while ((action_id != ACTION_STOP_CAMERA) &&
+ (action_id != ACTION_PREVIEW_VIDEO_RESOLUTION) && (action_id !=ACTION_SWITCH_CAMERA));
+ action_id = ACTION_NO_ACTION;
+
+ //system_destroy();
+
+
+ return back_mainflag;
+
+ERROR:
+ /* any failed capture/record/switch action aborts the whole app */
+ back_mainflag = 0;
+ return back_mainflag;
+}
+
+
+/*===========================================================================
+ * FUNCTION - preview_video_resolution -
+ *
+ * DESCRIPTION: look up the preview/video dimensions for the selected menu
+ * entry, store them in the global input_display, and set
+ * preview_video_resolution_flag on success. Returns 0 on success, -1 when
+ * the dimension table rejects the selection.
+ * ===========================================================================*/
+int preview_video_resolution (int preview_video_action_param) {
+ char * resolution_name;
+ CDBG("Selecting the action for preview/video resolution = %d \n", preview_video_action_param);
+ resolution_name = set_preview_video_dimension_tbl(preview_video_action_param,
+ & input_display.user_input_display_width,
+ & input_display.user_input_display_height);
+
+ /* BUGFIX: validate the lookup before logging the name; the previous code
+ * passed resolution_name to a %s conversion before the NULL check, which
+ * is undefined behavior on a failed lookup. */
+ if (resolution_name == NULL) {
+ CDBG("main:%d set_preview_dimension failed!\n", __LINE__);
+ goto ERROR;
+ }
+
+ CDBG("Selected preview/video resolution is %s\n", resolution_name);
+
+ CDBG("Selected Preview Resolution: display_width = %d, display_height = %d\n",
+ input_display.user_input_display_width, input_display.user_input_display_height);
+
+ preview_video_resolution_flag = 1;
+ return 0;
+
+ERROR:
+ return -1;
+}
+
+/*===========================================================================
+ * FUNCTION - set_whitebalance -
+ *
+ * DESCRIPTION: translate a white-balance menu selection into a V4L2 control
+ * request: AUTO/OFF toggle V4L2_CID_AUTO_WHITE_BALANCE, every other mode
+ * maps to a fixed color temperature on V4L2_CID_WHITE_BALANCE_TEMPERATURE.
+ * The VIDIOC_S_CTRL calls are currently commented out, so only the local
+ * ctrl struct is populated and rc stays 0.
+ * ===========================================================================*/
+int set_whitebalance (int wb_action_param) {
+
+ int rc = 0;
+ struct v4l2_control ctrl;
+
+ if (wb_action_param == MM_CAMERA_WHITE_BALANCE_AUTO) {
+ ctrl.id = V4L2_CID_AUTO_WHITE_BALANCE;
+ ctrl.value = true;
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+
+ } else if ( wb_action_param == MM_CAMERA_WHITE_BALANCE_OFF) {
+ ctrl.id = V4L2_CID_AUTO_WHITE_BALANCE;
+ ctrl.value = false;
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+
+ } else {
+ /* fixed color temperatures (Kelvin) per preset */
+ int temperature = 6500;
+
+ switch (wb_action_param) {
+ case MM_CAMERA_WHITE_BALANCE_DAYLIGHT:
+ temperature = 6500;
+ break;
+ case MM_CAMERA_WHITE_BALANCE_INCANDESCENT:
+ temperature = 2800;
+ break;
+ case MM_CAMERA_WHITE_BALANCE_FLUORESCENT:
+ temperature = 4200;
+ break;
+ default:
+ temperature = 4200;
+ break;
+ }
+
+ ctrl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+ ctrl.value = temperature;
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+ }
+
+/* NOTE(review): label is never the target of a goto — dead. */
+DONE:
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION - set_exp_metering -
+ *
+ * DESCRIPTION: select the exposure-metering mode via the MSM private
+ * control MSM_V4L2_PID_EXP_METERING. The VIDIOC_S_CTRL call is currently
+ * commented out, so only the local ctrl struct is populated and 0 is
+ * returned.
+ * ===========================================================================*/
+int set_exp_metering (int exp_metering_action_param) {
+
+ int rc = 0;
+ struct v4l2_control ctrl;
+
+ ctrl.id = MSM_V4L2_PID_EXP_METERING;
+ ctrl.value = exp_metering_action_param;
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - get_ctrl_value -
+ *
+ * DESCRIPTION: map the menu selection onto the matching V4L2 control id in
+ * preparation for a VIDIOC_G_CTRL query (the ioctl itself is currently
+ * commented out, so rc stays 0). Unknown mode parameters leave ctrl.id 0.
+ * ===========================================================================*/
+int get_ctrl_value (int ctrl_value_mode_param){
+
+ int rc = 0;
+ struct v4l2_control ctrl;
+
+ /* BUGFIX: zero the control first so ctrl is never handed to the (future)
+ * VIDIOC_G_CTRL uninitialized when an unrecognized mode parameter falls
+ * through every branch below. */
+ memset(&ctrl, 0, sizeof(ctrl));
+
+ if (ctrl_value_mode_param == WHITE_BALANCE_STATE) {
+ printf("You chose WHITE_BALANCE_STATE\n");
+ ctrl.id = V4L2_CID_AUTO_WHITE_BALANCE;
+ }
+ else if (ctrl_value_mode_param == WHITE_BALANCE_TEMPERATURE) {
+ printf("You chose WHITE_BALANCE_TEMPERATURE\n");
+ ctrl.id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+ }
+ else if (ctrl_value_mode_param == BRIGHTNESS_CTRL) {
+ printf("You chose brightness value\n");
+ ctrl.id = V4L2_CID_BRIGHTNESS;
+ }
+ else if (ctrl_value_mode_param == EV) {
+ printf("You chose exposure value\n");
+ ctrl.id = V4L2_CID_EXPOSURE;
+ }
+ else if (ctrl_value_mode_param == CONTRAST_CTRL) {
+ printf("You chose contrast value\n");
+ ctrl.id = V4L2_CID_CONTRAST;
+ }
+ else if (ctrl_value_mode_param == SATURATION_CTRL) {
+ printf("You chose saturation value\n");
+ ctrl.id = V4L2_CID_SATURATION;
+ } else if (ctrl_value_mode_param == SHARPNESS_CTRL) {
+ printf("You chose sharpness value\n");
+ ctrl.id = V4L2_CID_SHARPNESS;
+ }
+
+ // rc = ioctl(camfd, VIDIOC_G_CTRL, &ctrl);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - toggle_afr -
+ *
+ * DESCRIPTION: flip V4L2_CID_EXPOSURE_AUTO_PRIORITY (auto frame rate),
+ * after verifying V4L2_CID_EXPOSURE_AUTO is in AUTO or SHUTTER_PRIORITY
+ * mode. All three ioctls are currently commented out, so rc stays 0, the
+ * rc == -1 branches are dead, and ctrl.value is whatever memset left (0).
+ * ===========================================================================*/
+int toggle_afr () {
+ int rc = 0;
+ struct v4l2_control ctrl;
+
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_EXPOSURE_AUTO;
+// rc = ioctl(camfd, VIDIOC_G_CTRL, &ctrl);
+ if (rc == -1) {
+ CDBG("%s: VIDIOC_G_CTRL V4L2_CID_EXPOSURE_AUTO failed: %s\n",
+ __func__, strerror(errno));
+ return rc;
+ }
+
+ /* V4L2_CID_EXPOSURE_AUTO needs to be AUTO or SHUTTER_PRIORITY */
+ /* NOTE(review): with the G_CTRL above disabled, ctrl.value is 0 here —
+ * whether this check passes depends on the V4L2 enum values; revisit
+ * when the ioctls are re-enabled. */
+ if (ctrl.value != V4L2_EXPOSURE_AUTO &&
+ ctrl.value != V4L2_EXPOSURE_SHUTTER_PRIORITY) {
+ CDBG("%s: V4L2_CID_EXPOSURE_AUTO needs to be AUTO/SHUTTER_PRIORITY\n",
+ __func__);
+ return -1;
+ }
+
+ /* Get V4L2_CID_EXPOSURE_AUTO_PRIORITY */
+ memset(&ctrl, 0, sizeof(ctrl));
+ ctrl.id = V4L2_CID_EXPOSURE_AUTO_PRIORITY;
+ // rc = ioctl(camfd, VIDIOC_G_CTRL, &ctrl);
+ if (rc == -1) {
+ CDBG("%s: VIDIOC_G_CTRL V4L2_CID_EXPOSURE_AUTO_PRIORITY failed: %s\n",
+ __func__, strerror(errno));
+ return rc;
+ }
+
+ /* toggle and write back */
+ ctrl.value = !ctrl.value;
+ printf("V4L2_CID_EXPOSURE_AUTO_PRIORITY changed to %d\n", ctrl.value);
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+ if (rc == -1) {
+ CDBG("%s: VIDIOC_S_CTRL V4L2_CID_EXPOSURE_AUTO_PRIORITY failed: %s\n",
+ __func__, strerror(errno));
+ }
+ return rc;
+}
+
+
+/*===========================================================================
+ * FUNCTION - set_zoom -
+ *
+ * DESCRIPTION: step the global 'zoom_level' by the driver-reported step
+ * (ZOOM_IN up, ZOOM_OUT down), clamped to the zoom_queryctrl min/max, then
+ * prepare a V4L2_CID_ZOOM_ABSOLUTE control. The VIDIOC_S_CTRL is currently
+ * commented out. Returns 0, or -EINVAL for an unknown direction.
+ * ===========================================================================*/
+int set_zoom (int zoom_action_param) {
+ int rc = 0;
+ struct v4l2_control ctrl;
+
+ if (zoom_action_param == ZOOM_IN) {
+ zoom_level += zoom_queryctrl.step;
+ if (zoom_level > zoom_queryctrl.maximum)
+ zoom_level = zoom_queryctrl.maximum;
+ } else if (zoom_action_param == ZOOM_OUT) {
+ zoom_level -= zoom_queryctrl.step;
+ if (zoom_level < zoom_queryctrl.minimum)
+ zoom_level = zoom_queryctrl.minimum;
+ } else {
+ CDBG("%s: Invalid zoom_action_param value\n", __func__);
+ return -EINVAL;
+ }
+ ctrl.id = V4L2_CID_ZOOM_ABSOLUTE;
+ ctrl.value = zoom_level;
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - set_iso -
+ *
+ * DESCRIPTION: select the ISO mode via the MSM private control
+ * MSM_V4L2_PID_ISO. The menu parameter is 1-based, hence the -1 when
+ * storing into ctrl.value. The VIDIOC_S_CTRL is currently commented out,
+ * so 0 is always returned.
+ * ===========================================================================*/
+int set_iso (int iso_action_param) {
+ int rc = 0;
+ struct v4l2_control ctrl;
+
+ ctrl.id = MSM_V4L2_PID_ISO;
+ ctrl.value = iso_action_param - 1;
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - increase_sharpness -
+ *
+ * DESCRIPTION: step the global 'sharpness' up by the driver-reported step,
+ * clamped to sharpness_queryctrl.maximum, and prepare a V4L2_CID_SHARPNESS
+ * control. The VIDIOC_S_CTRL is currently commented out, so 0 is always
+ * returned.
+ * ===========================================================================*/
+int increase_sharpness () {
+ int rc = 0;
+ struct v4l2_control ctrl;
+ int new_level = sharpness + sharpness_queryctrl.step;
+
+ if (new_level > sharpness_queryctrl.maximum)
+ new_level = sharpness_queryctrl.maximum;
+ sharpness = new_level;
+
+ ctrl.id = V4L2_CID_SHARPNESS;
+ ctrl.value = sharpness;
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - decrease_sharpness -
+ *
+ * DESCRIPTION: step the global 'sharpness' down by the driver-reported
+ * step, clamped to sharpness_queryctrl.minimum, and prepare a
+ * V4L2_CID_SHARPNESS control. The VIDIOC_S_CTRL is currently commented
+ * out, so 0 is always returned.
+ * ===========================================================================*/
+int decrease_sharpness () {
+ int rc = 0;
+ struct v4l2_control ctrl;
+ int new_level = sharpness - sharpness_queryctrl.step;
+
+ if (new_level < sharpness_queryctrl.minimum)
+ new_level = sharpness_queryctrl.minimum;
+ sharpness = new_level;
+
+ ctrl.id = V4L2_CID_SHARPNESS;
+ ctrl.value = sharpness;
+ // rc = ioctl(camfd, VIDIOC_S_CTRL, &ctrl);
+
+ return rc;
+}
+
+/*===========================================================================
+ * FUNCTION - print_current_menu -
+ *
+ * DESCRIPTION: print the menu table that matches the currently active menu
+ * id; unknown ids print nothing. Always returns 0.
+ * ===========================================================================*/
+int print_current_menu (menu_id_change_t current_menu_id) {
+ switch (current_menu_id) {
+ case MENU_ID_MAIN:
+ print_menu_preview_video ();
+ break;
+ case MENU_ID_PREVIEWVIDEORESOLUTIONCHANGE:
+ camera_preview_video_resolution_change_tbl ();
+ break;
+ case MENU_ID_WHITEBALANCECHANGE:
+ camera_preview_video_wb_change_tbl();
+ break;
+ case MENU_ID_EXPMETERINGCHANGE:
+ camera_preview_video_exp_metering_change_tbl();
+ break;
+ case MENU_ID_GET_CTRL_VALUE:
+ camera_preview_video_get_ctrl_value_tbl();
+ break;
+ case MENU_ID_ISOCHANGE:
+ camera_preview_video_iso_change_tbl();
+ break;
+ case MENU_ID_BRIGHTNESSCHANGE:
+ camera_brightness_change_tbl ();
+ break;
+ case MENU_ID_CONTRASTCHANGE:
+ camera_contrast_change_tbl ();
+ break;
+ case MENU_ID_EVCHANGE:
+ camera_EV_change_tbl ();
+ break;
+ case MENU_ID_SATURATIONCHANGE:
+ camera_saturation_change_tbl ();
+ break;
+ case MENU_ID_ZOOMCHANGE:
+ camera_preview_video_zoom_change_tbl();
+ break;
+ case MENU_ID_SHARPNESSCHANGE:
+ camera_preview_video_sharpness_change_tbl();
+ break;
+ case MENU_ID_SWITCHCAMERA:
+ camera_switch_tbl();
+ break;
+ case MENU_ID_RECORD:
+ camera_record_tbl();
+ break;
+ default:
+ /* unknown menu id: nothing to print (matches original behavior) */
+ break;
+ }
+
+ return 0;
+}
+
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_preview.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_preview.c
new file mode 100644
index 0000000..1b04b66
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_preview.c
@@ -0,0 +1,1076 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/mman.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <linux/msm_ion.h>
+#include "mm_qcamera_app.h"
+
+
+/*===========================================================================
+ * FUNCTION - mm_camera_do_mmap_ion -
+ *
+ * DESCRIPTION: allocate an ION buffer (length rounded up to a 4096-byte
+ * page), export it as a shareable fd via ION_IOC_SHARE, and mmap it
+ * read/write. On success returns the mapped virtual address, stores the
+ * share fd in *mapFd, and leaves the ION handle in alloc/ion_info_fd for
+ * the caller to free later. On failure frees the handle and returns NULL.
+ *==========================================================================*/
+uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+ struct ion_fd_data *ion_info_fd, int *mapFd)
+{
+ void *ret; /* returned virtual address */
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ /* to make it page size aligned */
+ alloc->len = (alloc->len + 4095) & (~4095);
+
+ rc = ioctl(ion_fd, ION_IOC_ALLOC, alloc);
+ if (rc < 0) {
+ CDBG_ERROR("ION allocation failed %s\n", strerror(errno));
+ goto ION_ALLOC_FAILED;
+ }
+
+ ion_info_fd->handle = alloc->handle;
+ rc = ioctl(ion_fd, ION_IOC_SHARE, ion_info_fd);
+ if (rc < 0) {
+ CDBG_ERROR("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ *mapFd = ion_info_fd->fd;
+ ret = mmap(NULL,
+ alloc->len,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ *mapFd,
+ 0);
+
+ if (ret == MAP_FAILED) {
+ /* NOTE(review): when mmap fails after a successful ION_IOC_SHARE,
+ * ion_info_fd->fd appears to be left open here — confirm whether the
+ * handle free below also releases the share fd. */
+ CDBG_ERROR("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+ goto ION_MAP_FAILED;
+ }
+
+ return ret;
+
+ ION_MAP_FAILED:
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ ION_ALLOC_FAILED:
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_do_munmap_ion -
+ *
+ * DESCRIPTION: tear down a buffer created by mm_camera_do_mmap_ion():
+ * unmap the virtual range, close the ION share fd, and free the ION
+ * handle. Returns the munmap() result (the close/ioctl results are not
+ * checked).
+ *==========================================================================*/
+int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+ void *addr, size_t size)
+{
+ int rc = 0;
+ rc = munmap(addr, size);
+ close(ion_info_fd->fd);
+
+ struct ion_handle_data handle_data;
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ return rc;
+}
+
+
+/* Fill 'fmt' with the preview stream format for camera 'cam_id', copying
+ * the pixel format and display dimensions from the app object's cached
+ * dimension info. Always returns MM_CAMERA_OK. */
+int mm_app_set_preview_fmt(int cam_id,mm_camera_image_fmt_t *fmt)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ fmt->meta_header = MM_CAMEAR_META_DATA_TYPE_DEF;
+ fmt->fmt = pme->dim.prev_format;
+ fmt->width = pme->dim.display_width;
+ fmt->height = pme->dim.display_height;
+ return rc;
+}
+
+//void dumpFrameToFile(struct msm_frame* newFrame, int w, int h, char* name, int main_422)
+/* Debug helper: dump a frame's Y plane followed by its chroma data to
+ * /data/<name>.yuv. The 'w', 'h' and 'main_422' parameters are only used
+ * by the older msm_frame path, which is compiled out (#if 0) below; the
+ * active path writes plane lengths taken from the buf_def itself. */
+void dumpFrameToFile(mm_camera_buf_def_t* newFrame, int w, int h, char* name, int main_422)
+{
+ char buf[32];
+ int file_fd;
+ int i;
+ char *ext = "yuv";
+#if 0
+ if ( newFrame != NULL) {
+ char * str;
+ snprintf(buf, sizeof(buf), "/data/%s.%s", name,ext);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ if (file_fd < 0) {
+ CDBG_ERROR("%s: cannot open file\n", __func__);
+ } else {
+ CDBG("%s: %d %d", __func__, newFrame->y_off, newFrame->cbcr_off);
+ write(file_fd, (const void *)(newFrame->buffer+newFrame->y_off), w * h);
+ write(file_fd, (const void *)
+ (newFrame->buffer + newFrame->cbcr_off), w * h / 2 * main_422);
+ close(file_fd);
+ CDBG("dump %s", buf);
+ }
+ }
+#endif
+ if ( newFrame != NULL) {
+ char * str;
+ snprintf(buf, sizeof(buf), "/data/%s.%s", name,ext);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ if (file_fd < 0) {
+ CDBG_ERROR("%s: cannot open file\n", __func__);
+ } else {
+ /* NOTE(review): buffer + offset is stored in an int and later cast
+ * back to a pointer — this truncates addresses on 64-bit builds;
+ * presumably fine on this 32-bit target, but verify. */
+ int y_off = newFrame->buffer + newFrame->planes[0].data_offset;
+ //int cbcr_off = newFrame->buffer + newFrame->planes[1].data_offset;//newFrame->buffer + newFrame->planes[0].length;
+ int cbcr_off = newFrame->buffer + newFrame->planes[0].length;
+ CDBG("%s: Y_off = %p cbcr_off = %p", __func__, y_off,cbcr_off);
+ CDBG("%s: Y_off length = %d cbcr_off length = %d", __func__, newFrame->planes[0].length,newFrame->planes[1].length);
+
+ write(file_fd, (const void *)(y_off), newFrame->planes[0].length);
+ /* NOTE(review): writes planes[1].length * num_planes bytes of chroma
+ * in one go — assumes all non-Y planes are contiguous and equally
+ * sized; confirm against the stream's plane layout. */
+ write(file_fd, (const void *)(cbcr_off),
+ (newFrame->planes[1].length * newFrame->num_planes));
+ /*for(i = 1; i < newFrame->num_planes; i++) {
+ CDBG("%s: CBCR = %d", __func__, newFrame->planes[j].data_offset);
+ write(file_fd, (const void *)
+ (newFrame->planes[i].data_offset), w * h / 2 * main_422);
+ }*/
+ close(file_fd);
+ CDBG("dump %s", buf);
+ }
+ }
+}
+
+/* Switch camera 'cam_id' into plain CAMERA_MODE: stop the current preview,
+ * clear the recording hint, and restart preview. No-op if already in
+ * CAMERA_MODE. Returns MM_CAMERA_OK on success, or the first failing
+ * step's rc. */
+int mm_app_open_camera(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int value = 0;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if (pme->cam_mode == CAMERA_MODE) {
+ return rc;
+ }
+
+ if (MM_CAMERA_OK != (rc = mm_app_stop_preview(cam_id))) {
+ CDBG_ERROR("%s:Stop preview err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ /* recording hint off (value == 0) for still-camera mode */
+ pme->cam->ops->set_parm(pme->cam->camera_handle,MM_CAMERA_PARM_RECORDING_HINT, &value);
+
+ if (MM_CAMERA_OK != (rc = mm_app_start_preview(cam_id))) {
+ CDBG_ERROR("%s:Start preview err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ pme->cam_mode = CAMERA_MODE;
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+
+/* Switch camera 'cam_id' into ZSL_MODE: stop the current preview, clear
+ * the recording hint, and restart preview in ZSL configuration. No-op if
+ * already in ZSL_MODE. Returns MM_CAMERA_OK on success, or the first
+ * failing step's rc. */
+int mm_app_open_zsl(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int value = 0;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if (pme->cam_mode == ZSL_MODE) {
+ return rc;
+ }
+
+ if (MM_CAMERA_OK != (rc = mm_app_stop_preview(cam_id))) {
+ CDBG_ERROR("%s:Stop preview err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ /* recording hint off (value == 0) for ZSL mode */
+ pme->cam->ops->set_parm(pme->cam->camera_handle,MM_CAMERA_PARM_RECORDING_HINT, &value);
+
+ if (MM_CAMERA_OK != (rc = mm_app_start_preview_zsl(cam_id))) {
+ CDBG_ERROR("%s:stream on preview err=%d\n", __func__, rc);
+ goto end;
+ }
+ pme->cam_mode = ZSL_MODE;
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+#if 0
+/* NOTE(review): this whole region is preprocessor-disabled — an older
+ * preview-buffer alloc/free pair superseded by mm_stream_initbuf below.
+ * Kept verbatim; consider deleting once the replacement is proven. */
+int mm_stream_deinit_preview_buf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data, uint8_t num_bufs,
+ mm_camera_buf_def_t *bufs)
+{
+ int i, rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+ for (i = 0; i < num_bufs; i++) {
+ rc = my_cam_app.hal_lib.mm_camera_do_munmap_ion (pme->ionfd, &(pme->preview_buf.frame[i].fd_data),
+ (void *)pme->preview_buf.frame[i].buffer, bufs[i].frame_len);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s: mm_camera_do_munmap err, pmem_fd = %d, rc = %d",
+ __func__, bufs[i].fd, rc);
+ }
+ }
+ close(pme->ionfd);
+ return rc;
+}
+
+int mm_stream_init_preview_buf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data,
+ mm_camera_frame_len_offset *frame_offset_info,
+ uint8_t num_bufs,
+ uint8_t *initial_reg_flag,
+ mm_camera_buf_def_t *bufs)
+{
+ int i,j,num_planes, frame_len, y_off, cbcr_off;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ uint32_t pmem_addr = 0;
+
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ num_planes = frame_offset_info->num_planes;
+ for ( i = 0; i < num_planes; i++) {
+ planes[i] = frame_offset_info->mp[i].len;
+ }
+
+ frame_len = frame_offset_info->frame_len;
+ y_off = frame_offset_info->mp[0].offset;
+ cbcr_off = frame_offset_info->mp[1].offset;
+
+ CDBG("Allocating Preview Memory for %d buffers frame_len = %d",num_bufs,frame_offset_info->frame_len);
+
+ for (i = 0; i < num_bufs ; i++) {
+ int j;
+ pme->preview_buf.reg[i] = 1;
+ initial_reg_flag[i] = 1;
+
+ pme->preview_buf.frame_len = frame_len;
+ pme->preview_buf.frame[i].ion_alloc.len = pme->preview_buf.frame_len;
+ pme->preview_buf.frame[i].ion_alloc.flags =
+ (0x1 << CAMERA_ION_HEAP_ID | 0x1 << ION_IOMMU_HEAP_ID);
+ pme->preview_buf.frame[i].ion_alloc.align = 4096;
+
+ pmem_addr = (unsigned long) my_cam_app.hal_lib.mm_camera_do_mmap_ion(pme->ionfd,
+ &(pme->preview_buf.frame[i].ion_alloc), &(pme->preview_buf.frame[i].fd_data),
+ &pme->preview_buf.frame[i].fd);
+
+ pme->preview_buf.frame[i].buffer = pmem_addr;
+ pme->preview_buf.frame[i].path = OUTPUT_TYPE_P;
+ pme->preview_buf.frame[i].y_off = 0;
+ pme->preview_buf.frame[i].cbcr_off = planes[0];
+ pme->preview_buf.frame[i].phy_offset = 0;
+
+ CDBG("Buffer allocated Successfully fd = %d",pme->preview_buf.frame[i].fd);
+
+ bufs[i].fd = pme->preview_buf.frame[i].fd;
+ //bufs[i].buffer = pmem_addr;
+ bufs[i].frame_len = pme->preview_buf.frame[i].ion_alloc.len;
+ bufs[i].num_planes = num_planes;
+
+ bufs[i].frame = &pme->preview_buf.frame[i];
+
+ /* Plane 0 needs to be set seperately. Set other planes
+ * in a loop. */
+ bufs[i].planes[0].length = planes[0];
+ bufs[i].planes[0].m.userptr = bufs[i].fd;
+ bufs[i].planes[0].data_offset = y_off;
+ bufs[i].planes[0].reserved[0] = 0;
+ //buf_def->buf.mp[i].frame_offset;
+ for (j = 1; j < num_planes; j++) {
+ bufs[i].planes[j].length = planes[j];
+ bufs[i].planes[j].m.userptr = bufs[i].fd;
+ bufs[i].planes[j].data_offset = cbcr_off;
+ bufs[i].planes[j].reserved[0] =
+ bufs[i].planes[j-1].reserved[0] +
+ bufs[i].planes[j-1].length;
+ }
+ }
+ return MM_CAMERA_OK;
+}
+#endif
+
+int mm_stream_initbuf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data,
+ mm_camera_frame_len_offset *frame_offset_info,
+ uint8_t num_bufs,
+ uint8_t *initial_reg_flag,
+ mm_camera_buf_def_t *bufs)
+{
+ int i,y_off, cbcr_off,num_planes;
+ int plane_len;
+ struct ion_allocation_data ion_alloc;
+ struct ion_fd_data ion_info_fd;
+ uint32_t pmem_addr = 0;
+ uint32_t planes[VIDEO_MAX_PLANES];
+
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ num_planes = frame_offset_info->num_planes;
+ if (num_planes == 1) {
+ y_off = frame_offset_info->mp[0].offset;;
+ cbcr_off = 0;
+ } else {
+ y_off = frame_offset_info->mp[0].offset;
+ cbcr_off = frame_offset_info->mp[1].offset;
+ }
+
+ CDBG("%s: y_off = %d,cbcr_off = %d,num_planes = %d",__func__,y_off,
+ cbcr_off,num_planes);
+
+ pme->ionfd = open("/dev/ion", O_RDONLY);
+ if (pme->ionfd < 0) {
+ ALOGE("Ion dev open failed\n");
+ ALOGE("Error is %s\n", strerror(errno));
+ }
+
+ for (i = 0; i < num_bufs ; i++) {
+ int j;
+
+ //if(pme->cam_mode != RECORDER_MODE || pme->fullSizeSnapshot) {
+ initial_reg_flag[i] = 1;
+ //}else{
+ //initial_reg_flag[i] = 0;
+ //}
+
+ ion_alloc.len = frame_offset_info->frame_len;
+ ion_alloc.flags =
+ (0x1 << CAMERA_ION_HEAP_ID | 0x1 << ION_IOMMU_HEAP_ID);
+ ion_alloc.align = 4096;
+
+ /*bufs[i].buffer = my_cam_app.hal_lib.mm_camera_do_mmap_ion(pme->ionfd,
+ &ion_alloc, &ion_info_fd,
+ &bufs[i].fd);*/
+
+ bufs[i].buffer = mm_camera_do_mmap_ion(pme->ionfd,
+ &ion_alloc, &ion_info_fd,
+ &bufs[i].fd);
+ CDBG(" %s : Buffer allocated fd = %d, length = %d, y_off = %d cdcr_off = %d",
+ __func__,bufs[i].fd,ion_alloc.len,y_off,cbcr_off);
+
+ bufs[i].frame_len = ion_alloc.len;
+ bufs[i].num_planes = num_planes;
+
+ /* Plane 0 needs to be set seperately. Set other planes
+ * in a loop. */
+ bufs[i].planes[0].length = frame_offset_info->mp[0].len;
+ bufs[i].planes[0].m.userptr = bufs[i].fd;
+ bufs[i].planes[0].data_offset = y_off;
+ bufs[i].planes[0].reserved[0] = 0;
+ //buf_def->buf.mp[i].frame_offset;
+ for (j = 1; j < num_planes; j++) {
+ bufs[i].planes[j].length = frame_offset_info->mp[j].len;
+ bufs[i].planes[j].m.userptr = bufs[i].fd;
+ bufs[i].planes[j].data_offset = cbcr_off;
+ bufs[i].planes[j].reserved[0] =
+ bufs[i].planes[j-1].reserved[0] +
+ bufs[i].planes[j-1].length;
+ }
+ }
+ CDBG("%s: X",__func__);
+ return MM_CAMERA_OK;
+}
+
#if 0
/* Disabled dispatcher variant of the get_buf callback: intended to route
 * buffer allocation to a per-stream-type helper.
 * NOTE(review): streamType is unconditionally overwritten to
 * MM_CAMERA_PREVIEW right after the lookup loop, so the switch below can
 * only ever take the preview branch.  Compiled out via #if 0. */
int mm_stream_initbuf_1(uint32_t camera_handle,
                        uint32_t ch_id, uint32_t stream_id,
                        void *user_data,
                        mm_camera_frame_len_offset *frame_offset_info,
                        uint8_t num_bufs,
                        uint8_t *initial_reg_flag,
                        mm_camera_buf_def_t *bufs)
{
    int i;
    int streamType = 0;

    mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;

    CDBG("%s : E ", __FUNCTION__);

    /* Map the stream id back to its slot/type on the app object. */
    for (i= 0; i < 5; i++) {
        if (pme->stream[i].id == stream_id) {
            CDBG("Allocate Memory for Stream %d",i);
            streamType = i;
            break;
        }
    }

    /* Forces the preview branch — see note above. */
    streamType = MM_CAMERA_PREVIEW;
    switch (streamType) {
    case MM_CAMERA_PREVIEW:
        mm_stream_init_preview_buf( camera_handle,
                                    ch_id, stream_id,
                                    user_data,
                                    frame_offset_info,
                                    num_bufs,
                                    initial_reg_flag,
                                    bufs);
        break;
    case MM_CAMERA_VIDEO:
        mm_stream_init_video_buf( camera_handle,
                                  ch_id, stream_id,
                                  user_data,
                                  frame_offset_info,
                                  num_bufs,
                                  initial_reg_flag,
                                  bufs);
        break;
    case MM_CAMERA_SNAPSHOT_MAIN:
        mm_stream_init_main_buf( camera_handle,
                                 ch_id, stream_id,
                                 user_data,
                                 frame_offset_info,
                                 num_bufs,
                                 initial_reg_flag,
                                 bufs);
        break;
    case MM_CAMERA_SNAPSHOT_THUMBNAIL:
        mm_stream_init_thumbnail_buf( camera_handle,
                                      ch_id, stream_id,
                                      user_data,
                                      frame_offset_info,
                                      num_bufs,
                                      initial_reg_flag,
                                      bufs);
        break;
    default:
        break;
    }

    CDBG(" %s : X ",__FUNCTION__);
    return MM_CAMERA_OK;
}
#endif
+int mm_stream_deinitbuf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data, uint8_t num_bufs,
+ mm_camera_buf_def_t *bufs)
+{
+ int i, rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ CDBG("%s: E",__func__);
+
+ for (i = 0; i < num_bufs; i++) {
+ /*rc = my_cam_app.hal_lib.mm_camera_do_munmap_ion (pme->ionfd, &bufs[i].fd,
+ (void *)bufs[i].buffer, bufs[i].frame_len);*/
+ rc = mm_camera_do_munmap_ion (pme->ionfd, &bufs[i].fd,
+ (void *)bufs[i].buffer, bufs[i].frame_len);
+ }
+ close(pme->ionfd);
+ CDBG("%s: X",__func__);
+ return rc;
+}
+
#if 0
/* Disabled dispatcher variant of the put_buf callback, mirroring
 * mm_stream_initbuf_1().
 * NOTE(review): streamType is overwritten to MM_CAMERA_PREVIEW before the
 * switch, so only the preview branch is reachable.  Compiled out. */
static int mm_stream_deinitbuf_1(uint32_t camera_handle,
                                 uint32_t ch_id, uint32_t stream_id,
                                 void *user_data, uint8_t num_bufs,
                                 mm_camera_buf_def_t *bufs)
{
    int i, rc = MM_CAMERA_OK;
    int streamType = 0;
    mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
    CDBG("%s: BEGIN",__func__);

    /* Map the stream id back to its slot/type on the app object. */
    for (i= 0; i < 5; i++) {
        if (pme->stream[i].id == stream_id) {
            CDBG("Allocate Memory for Stream %d",i);
            streamType = i;
            break;
        }
    }
    /* Forces the preview branch — see note above. */
    streamType = MM_CAMERA_PREVIEW;
    switch (streamType) {
    case MM_CAMERA_PREVIEW:
        mm_stream_deinit_preview_buf(camera_handle,
                                     ch_id, stream_id,
                                     user_data, num_bufs,
                                     bufs);
        break;
    case MM_CAMERA_VIDEO:
        mm_stream_deinit_video_buf(camera_handle,
                                   ch_id, stream_id,
                                   user_data, num_bufs,
                                   bufs);
        break;
    case MM_CAMERA_SNAPSHOT_MAIN:
        mm_stream_deinit_main_buf(camera_handle,
                                  ch_id, stream_id,
                                  user_data, num_bufs,
                                  bufs);
        break;
    case MM_CAMERA_SNAPSHOT_THUMBNAIL:
        mm_stream_deinit_thumbnail_buf(camera_handle,
                                       ch_id, stream_id,
                                       user_data, num_bufs,
                                       bufs);
        break;
    default:
        break;
    }

    /* zero out the buf stuct */
    CDBG("%s: END",__func__);
    return MM_CAMERA_OK;
}
#endif
+
+void preview_cb_signal(mm_camera_app_obj_t *pme)
+{
+ if (pme->cam_state == CAMERA_STATE_PREVIEW) {
+ mm_camera_app_done();
+ }
+}
+
/* Per-frame callback for the preview stream.  Dumps the frame to file,
 * renders it via the DL path when not in sanity mode, and always requeues
 * the buffer to the driver with qbuf so the stream does not starve.  In
 * sanity-run mode it additionally signals the test harness. */
void mm_app_preview_notify_cb(mm_camera_super_buf_t *bufs,
                              void *user_data)
{
    int rc;
    mm_camera_buf_def_t *frame = NULL;
    mm_camera_app_obj_t *pme = NULL;
    CDBG("%s: BEGIN\n", __func__);
    frame = bufs->bufs[MM_CAMERA_PREVIEW] ;
    pme = (mm_camera_app_obj_t *)user_data;

    CDBG("%s: BEGIN - length=%d, frame idx = %d\n", __func__, frame->frame_len, frame->frame_idx);

    dumpFrameToFile(frame,pme->dim.display_width,pme->dim.display_height,"preview", 1);
    /* Skip on-screen rendering during sanity runs (no display is set up). */
    if (!my_cam_app.run_sanity) {
        if (0 != (rc = mm_app_dl_render(frame->fd, NULL))) {
            CDBG("%s:DL rendering err=%d, frame fd=%d,frame idx = %d\n",
                 __func__, rc, frame->fd, frame->frame_idx);
        }
    }

    CDBG("In CB function i/p = %p o/p = %p",bufs->bufs[MM_CAMERA_PREVIEW],frame);

    /* Requeue the buffer; on failure we bail out without signalling. */
    if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,frame)) {
        CDBG_ERROR("%s: Failed in Preview Qbuf\n", __func__);
        return;
    }
    if (my_cam_app.run_sanity) {
        preview_cb_signal(pme);
    }
    CDBG("%s: END\n", __func__);

}
+
/* Bundle callback for ZSL mode: receives the matched super-buffer.
 * bufs->bufs[0] is the preview frame, bufs->bufs[1] (when num_bufs == 2)
 * the snapshot-main frame.  Dumps/renders the frames and requeues them.
 * NOTE(review): thumb_frame aliases bufs->bufs[0] (the preview frame) —
 * presumably a placeholder until a real thumbnail stream is bundled;
 * verify before trusting the "zsl_thumb" dump.  The aliased thumb buffer
 * is requeued once via the preview qbuf below. */
static void mm_app_zsl_notify_cb(mm_camera_super_buf_t *bufs,
                                 void *user_data)
{
    int rc;
    int i = 0;   /* unused */
    mm_camera_buf_def_t *preview_frame = NULL;
    mm_camera_buf_def_t *main_frame = NULL;
    mm_camera_buf_def_t *thumb_frame = NULL;
    mm_camera_app_obj_t *pme = NULL;
    CDBG("%s: BEGIN\n", __func__);

    pme = (mm_camera_app_obj_t *)user_data;

    CDBG("%s : total streams = %d",__func__,bufs->num_bufs);
    preview_frame = bufs->bufs[0] ;
    main_frame = bufs->bufs[1];
    thumb_frame = bufs->bufs[0];   /* see NOTE(review) above */

    dumpFrameToFile(preview_frame,pme->dim.display_width,pme->dim.display_height,"zsl_preview", 1);
    if (0 != (rc = mm_app_dl_render(preview_frame->fd, NULL))) {
        CDBG("%s:DL rendering err=%d, frame fd=%d,frame idx = %d\n",
             __func__, rc, preview_frame->fd, preview_frame->frame_idx);
    }

    /* A snapshot burst delivers two buffers; dump and requeue the main one. */
    if (bufs->num_bufs == 2 && main_frame != NULL) {
        CDBG("mainframe frame_idx = %d fd = %d frame length = %d",main_frame->frame_idx,main_frame->fd,main_frame->frame_len);
        CDBG("thumnail frame_idx = %d fd = %d frame length = %d",thumb_frame->frame_idx,thumb_frame->fd,thumb_frame->frame_len);

        dumpFrameToFile(main_frame,pme->dim.picture_width,pme->dim.picture_height,"zsl_main", 1);
        dumpFrameToFile(thumb_frame,pme->dim.thumbnail_width,pme->dim.thumbnail_height,"zsl_thumb", 1);

        if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,main_frame)) {
            CDBG_ERROR("%s: Failed in thumbnail Qbuf\n", __func__);
        }
    }

    /* Always requeue the preview buffer. */
    if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,preview_frame)) {
        CDBG_ERROR("%s: Failed in Preview Qbuf\n", __func__);
    }
    CDBG("%s: END\n", __func__);
}
+
/* Prepare (but do not start) the preview stream: install the buffer
 * callbacks, set the channel op mode, add the preview stream and configure
 * its format/buffer count.  Streaming is started separately by
 * mm_app_streamon_preview().  Returns MM_CAMERA_OK or a negative error. */
int mm_app_prepare_preview(int cam_id)
{
    int rc = MM_CAMERA_OK;
    int op_mode;

    CDBG("%s: E",__func__);
    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);

    /* Buffer allocation/release callbacks used by the stack for this app. */
    pme->mem_cam->get_buf = mm_stream_initbuf;
    pme->mem_cam->put_buf = mm_stream_deinitbuf;
    pme->mem_cam->user_data = pme;

    /* Plain preview runs under the VIDEO op mode here (not ZSL). */
    op_mode = MM_CAMERA_OP_MODE_VIDEO;
    if (MM_CAMERA_OK != (rc = pme->cam->ops->set_parm(
        pme->cam->camera_handle,MM_CAMERA_PARM_OP_MODE, &op_mode))) {
        CDBG_ERROR("%s: Set preview op mode error",__func__);
        goto end;
    }

    pme->stream[MM_CAMERA_PREVIEW].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
                                                                  mm_app_preview_notify_cb,pme,
                                                                  MM_CAMERA_PREVIEW, 0);

    /* A zero stream id means add_stream failed. */
    if (!pme->stream[MM_CAMERA_PREVIEW].id) {
        CDBG_ERROR("%s:Add stream preview error =%d\n", __func__, rc);
        rc = -1;
        goto end;
    }

    CDBG("%s :Add stream is successfull stream ID = %d",__func__,pme->stream[MM_CAMERA_PREVIEW].id);

    mm_app_set_preview_fmt(cam_id,&pme->stream[MM_CAMERA_PREVIEW].str_config.fmt);
    pme->stream[MM_CAMERA_PREVIEW].str_config.need_stream_on = 1;
    pme->stream[MM_CAMERA_PREVIEW].str_config.num_of_bufs = PREVIEW_BUF_NUM;

    if (MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_PREVIEW].id,
                              &pme->stream[MM_CAMERA_PREVIEW].str_config))) {
        CDBG_ERROR("%s:preview streaming err=%d\n", __func__, rc);
        goto end;
    }
end:
    return rc;
}
+
+int mm_app_unprepare_preview(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ return rc;
+}
+
+int mm_app_streamon_preview(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ mm_camera_frame_len_offset frame_offset_info;
+
+ stream[0] = pme->stream[MM_CAMERA_PREVIEW].id;
+ stream[1] = 0;
+
+ pme->cam->ops->get_stream_parm(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_PREVIEW].id,MM_CAMERA_STREAM_OFFSET,&frame_offset_info);
+ ALOGE("DEBUG : length = %d",frame_offset_info.frame_len);
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->start_streams(pme->cam->camera_handle,pme->ch_id,1,&stream))) {
+ CDBG_ERROR("%s : Start Stream preview Error",__func__);
+ goto end;
+ }
+ pme->cam_state = CAMERA_STATE_PREVIEW;
+ end:
+ CDBG("%s: X rc = %d",__func__,rc);
+ return rc;
+}
+
+int mm_app_prepare_preview_zsl(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_bundle_attr_t attr;
+ int stream[3];
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ int op_mode = 0;
+
+ op_mode = MM_CAMERA_OP_MODE_ZSL;
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->set_parm(
+ pme->cam->camera_handle,MM_CAMERA_PARM_OP_MODE, &op_mode))) {
+ CDBG_ERROR("%s: Set preview op mode error",__func__);
+ goto end;
+ }
+
+ pme->stream[MM_CAMERA_PREVIEW].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
+ mm_app_preview_notify_cb,pme,
+ MM_CAMERA_PREVIEW, 0);
+
+ if (!pme->stream[MM_CAMERA_PREVIEW].id) {
+ CDBG_ERROR("%s:Add stream preview error =%d\n", __func__, rc);
+ goto end;
+ }
+
+ CDBG("%s :Add stream is successfull stream ID = %d",__func__,pme->stream[MM_CAMERA_PREVIEW].id);
+
+ mm_app_set_preview_fmt(cam_id,&pme->stream[MM_CAMERA_PREVIEW].str_config.fmt);
+ pme->stream[MM_CAMERA_PREVIEW].str_config.need_stream_on = 1;
+ pme->stream[MM_CAMERA_PREVIEW].str_config.num_of_bufs = PREVIEW_BUF_NUM;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_PREVIEW].id,
+ &pme->stream[MM_CAMERA_PREVIEW].str_config))) {
+ CDBG_ERROR("%s:preview streaming err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
+ NULL,pme,
+ MM_CAMERA_SNAPSHOT_MAIN, 0);
+
+ CDBG("Add Snapshot main is successfull stream ID = %d",pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id);
+ if (!pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id) {
+ CDBG_ERROR("%s:preview streaming err=%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.need_stream_on = 1;
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.num_of_bufs = 7;
+
+ mm_app_set_snapshot_fmt(cam_id,&pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.fmt);
+
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id,
+ &pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config))) {
+ CDBG_ERROR("%s:preview streaming err=%d\n", __func__, rc);
+ goto end;
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_streamon_preview_zsl(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_bundle_attr_t attr;
+ int stream[3];
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ int op_mode = 0;
+
+
+ stream[0] = pme->stream[MM_CAMERA_PREVIEW].id;
+ stream[1] = pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id;
+
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ attr.burst_num = 1;
+ attr.look_back = 2;
+ attr.post_frame_skip = 0;
+ attr.water_mark = 2;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->init_stream_bundle(
+ pme->cam->camera_handle,pme->ch_id,mm_app_zsl_notify_cb,pme,&attr,2,stream))) {
+ CDBG_ERROR("%s:init_stream_bundle err=%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->start_streams(pme->cam->camera_handle,pme->ch_id,
+ 2, stream))) {
+ CDBG_ERROR("%s:start_streams err=%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+ pme->cam_state = CAMERA_STATE_PREVIEW;
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+
+}
+
/* Bring up the framebuffer-overlay display path used to render preview
 * frames.  NOTE(review): a failure to launch the render thread is only
 * logged — rc stays MM_CAMERA_OK (the error assignment is deliberately
 * commented out), so callers cannot detect the failure. */
int initDisplay()
{
    int rc = MM_CAMERA_OK;

    use_overlay_fb_display_driver();
    if (launch_camframe_fb_thread()) {
        CDBG_ERROR("%s:launch_camframe_fb_thread failed!\n", __func__);
        //rc = -MM_CAMERA_E_GENERAL;
    }
    CDBG("%s: launch_camframe_fb_thread done\n", __func__);
    return rc;
}
+
/* Tear down the framebuffer render thread started by initDisplay().
 * Always reports success. */
int deinitDisplay()
{
    /* stop the display thread */
    release_camframe_fb_thread();
    return MM_CAMERA_OK;
}
+
+int mm_app_start_preview(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ int op_mode = 0;
+
+ CDBG("pme = %p, pme->cam =%p, pme->cam->camera_handle = %d",
+ pme,pme->cam,pme->cam->camera_handle);
+
+ if (pme->cam_state == CAMERA_STATE_PREVIEW) {
+ return rc;
+ }
+
+ if (!my_cam_app.run_sanity) {
+ if (MM_CAMERA_OK != initDisplay()) {
+ CDBG_ERROR("%s : Could not initalize display",__func__);
+ goto end;
+ }
+ }
+
+ if (MM_CAMERA_OK != (rc = mm_app_prepare_preview(cam_id))) {
+ CDBG_ERROR("%s:Stream On Preview failed rc=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = mm_app_streamon_preview(cam_id))) {
+ CDBG_ERROR("%s:Stream On Preview failed rc=%d\n", __func__, rc);
+ goto end;
+ }
+
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
/* Full ZSL preview start: init the display (unless in sanity mode),
 * install the buffer callbacks, prepare the ZSL streams, then bundle and
 * stream on via mm_app_streamon_preview_zsl(). */
int mm_app_start_preview_zsl(int cam_id)
{
    int rc = MM_CAMERA_OK;

    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);

    CDBG("pme = %p, pme->cam =%p, pme->cam->camera_handle = %d",
         pme,pme->cam,pme->cam->camera_handle);

    if (!my_cam_app.run_sanity) {
        if (MM_CAMERA_OK != initDisplay()) {
            CDBG_ERROR("%s : Could not initalize display",__func__);
            goto end;
        }
    }

    /* Buffer allocation/release callbacks used by the stack for this app. */
    pme->mem_cam->get_buf = mm_stream_initbuf;
    pme->mem_cam->put_buf = mm_stream_deinitbuf;
    pme->mem_cam->user_data = pme;

    if (MM_CAMERA_OK != (rc = mm_app_prepare_preview_zsl(cam_id))) {
        CDBG_ERROR("%s:Prepare preview err=%d\n", __func__, rc);
        goto end;
    }

    if (MM_CAMERA_OK != (rc = mm_app_streamon_preview_zsl(cam_id))) {
        CDBG_ERROR("%s:stream on preview err=%d\n", __func__, rc);
        goto end;
    }

    /*if(MM_CAMERA_OK != (rc = mm_app_bundle_zsl_stream(cam_id))){
        CDBG_ERROR("%s: bundle and start of ZSl err=%d\n", __func__, rc);
        goto end;
    }*/

end:
    CDBG("%s: END, rc=%d\n", __func__, rc);
    return rc;
}
+
+static int mm_app_streamoff_preview(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_PREVIEW].id;
+ stream[1] = 0;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->stop_streams(pme->cam->camera_handle,pme->ch_id,1,&stream))) {
+ CDBG_ERROR("%s : Preview Stream off Error",__func__);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_PREVIEW].id))) {
+ CDBG_ERROR("%s : Delete Stream Preview error",__func__);
+ goto end;
+ }
+ CDBG("del_stream successfull");
+ pme->cam_state = CAMERA_STATE_OPEN;
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+
+ return rc;
+}
+
+static int mm_app_streamoff_preview_zsl(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_PREVIEW].id;
+ stream[1] = pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->destroy_stream_bundle(pme->cam->camera_handle,pme->ch_id))) {
+ CDBG_ERROR("%s : ZSL Snapshot destroy_stream_bundle Error",__func__);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->stop_streams(pme->cam->camera_handle,pme->ch_id,2,&stream))) {
+ CDBG_ERROR("%s : Preview Stream off Error",__func__);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_PREVIEW].id))) {
+ CDBG_ERROR("%s : Delete Stream Preview error",__func__);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id))) {
+ CDBG_ERROR("%s : Delete Stream Preview error",__func__);
+ goto end;
+ }
+ CDBG("del_stream successfull");
+ pme->cam_state = CAMERA_STATE_OPEN;
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+
+ return rc;
+}
+
+int startPreview(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s: Start Preview",__func__);
+
+ if (pme->cam_mode == ZSL_MODE || pme->cam_mode == RECORDER_MODE) {
+ switch (pme->cam_state) {
+ case CAMERA_STATE_RECORD:
+ if (MM_CAMERA_OK != mm_app_stop_video(cam_id)) {
+ CDBG_ERROR("%s:Cannot stop video err=%d\n", __func__, rc);
+ return -1;
+ }
+ case CAMERA_STATE_PREVIEW:
+ if (MM_CAMERA_OK != mm_app_open_camera(cam_id)) {
+ CDBG_ERROR("%s: Cannot switch to camera mode=%d\n", __func__);
+ return -1;
+ }
+ break;
+ case CAMERA_STATE_SNAPSHOT:
+ default:
+ break;
+ }
+ } else if (pme->cam_mode == CAMERA_MODE && pme->cam_state == CAMERA_STATE_OPEN) {
+
+ if (MM_CAMERA_OK != (rc = mm_app_start_preview(cam_id))) {
+ CDBG_ERROR("%s:preview streaming on err=%d\n", __func__, rc);
+ return -1;
+ }
+ }
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int stopPreview(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s : pme->cam_mode = %d, pme->cam_state = %d",__func__,pme->cam_mode,pme->cam_state);
+
+ if (pme->cam_mode == CAMERA_MODE && pme->cam_state == CAMERA_STATE_PREVIEW) {
+ if (MM_CAMERA_OK != (rc = mm_app_stop_preview(cam_id))) {
+ CDBG("%s:streamoff preview err=%d\n", __func__, rc);
+ goto end;
+ }
+ } else if (pme->cam_mode == ZSL_MODE && pme->cam_state == CAMERA_STATE_PREVIEW) {
+ if (MM_CAMERA_OK != (rc = mm_app_stop_preview_zsl(cam_id))) {
+ CDBG("%s:streamoff preview err=%d\n", __func__, rc);
+ goto end;
+ }
+ } else if (pme->cam_mode == RECORDER_MODE && pme->cam_state == CAMERA_STATE_PREVIEW) {
+ if (MM_CAMERA_OK != (rc = mm_app_stop_preview(cam_id))) {
+ CDBG("%s:streamoff preview err=%d\n", __func__, rc);
+ goto end;
+ }
+ mm_app_unprepare_video(cam_id);
+ }
+ end:
+ return rc;
+}
+
+int mm_app_stop_preview(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if (MM_CAMERA_OK != (rc = mm_app_streamoff_preview(cam_id))) {
+ CDBG_ERROR("%s : Delete Stream Preview error",__func__);
+ goto end;
+ }
+
+ CDBG("Stop Preview successfull");
+
+ if (!my_cam_app.run_sanity) {
+ deinitDisplay();
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_stop_preview_zsl(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if (MM_CAMERA_OK != (rc = mm_app_streamoff_preview_zsl(cam_id))) {
+ CDBG_ERROR("%s : Delete Stream Preview error",__func__);
+ goto end;
+ }
+
+ CDBG("Stop Preview successfull");
+ if (!my_cam_app.run_sanity) {
+ deinitDisplay();
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_rdi.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_rdi.c
new file mode 100644
index 0000000..09d2722
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_rdi.c
@@ -0,0 +1,329 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <linux/ion.h>
+#include "mm_qcamera_app.h"
+
+/*typedef enum {
+ STREAM_IMAGE,
+ STREAM_RAW,
+ STREAM_IMAGE_AND_RAW,
+ STREAM_RAW_AND_RAW,
+ STREAM_MAX,
+} mm_camera_channel_stream_info_t;*/
+
/* RDI path operates on the raw stream only; the image+raw combinations in
 * the commented-out enum above are not enabled. */
#define RDI_MASK STREAM_RAW
/* Channel interface mode — only referenced from commented-out code below;
 * never assigned in this file.  NOTE(review): reads default to 0. */
mm_camera_channel_stream_info_t rdi_mode;
+
/* Fill in the stream format for the RDI (raw data interface) stream:
 * hard-coded 10-bit Bayer BGGR with zero width/height — presumably the
 * HAL substitutes the sensor's native dimensions; verify.
 * NOTE(review): 'dim' and 'pme' are unused since the get_parm call was
 * commented out. */
static int mm_app_set_rdi_fmt(int cam_id,mm_camera_image_fmt_t *fmt)
{
    int rc = MM_CAMERA_OK;
    cam_ctrl_dimension_t dim;
    mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);

    fmt->meta_header = MM_CAMEAR_META_DATA_TYPE_DEF;
    //pme->cam->ops->get_parm(pme->cam->camera_handle,MM_CAMERA_PARM_DIMENSION, &dim);
    fmt->fmt = CAMERA_BAYER_SBGGR10;//CAMERA_RDI;
    fmt->width = 0;
    fmt->height = 0;
    fmt->rotation = 0;

    CDBG("%s: RDI Dimensions = %d X %d",__func__,fmt->width,fmt->height);
    return rc;
}
+
+int mm_app_open_rdi(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int value = RDI_MASK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if (pme->cam_mode == RDI_MODE) {
+ return rc;
+ }
+
+ if (MM_CAMERA_OK != (rc = stopPreview(cam_id))) {
+ CDBG_ERROR("%s:Stop preview err=%d\n", __func__, rc);
+ goto end;
+ }
+ pme->cam_mode = RDI_MODE;
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+void rdi_cb_signal(mm_camera_app_obj_t *pme)
+{
+ if (pme->cam_mode == RDI_MODE) {
+ mm_camera_app_done();
+ }
+}
+
/* Per-frame callback for the RDI stream: dumps the raw frame to file and
 * requeues the buffer.  In sanity mode it also signals the harness.
 * NOTE(review): mm_camera_app_done() is called with an argument here but
 * with none in preview_cb_signal()/rdi_cb_signal() — signature not
 * visible from this file; verify this compiles as intended. */
static void mm_app_rdi_notify_cb(mm_camera_super_buf_t *bufs,
                                 void *user_data)
{
    int rc;
    mm_camera_buf_def_t *frame = NULL;
    mm_camera_app_obj_t *pme = NULL;
    CDBG("%s: BEGIN\n", __func__);
    frame = bufs->bufs[0] ;
    pme = (mm_camera_app_obj_t *)user_data;

    CDBG("%s: BEGIN - length=%d, frame idx = %d\n", __func__, frame->frame_len, frame->frame_idx);

    dumpFrameToFile(frame,pme->dim.rdi0_width,pme->dim.rdi0_height,"rdi", 1);

    /* Requeue the buffer; on failure we bail out without signalling. */
    if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,frame)) {
        CDBG_ERROR("%s: Failed in Preview Qbuf\n", __func__);
        return;
    }
    if (my_cam_app.run_sanity) {
        mm_camera_app_done(pme);
    }
    CDBG("%s: END\n", __func__);

}
+
+int mm_app_prepare_rdi(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int op_mode;
+ int value = RDI_MASK;
+
+ CDBG("%s: E",__func__);
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ pme->mem_cam->get_buf = mm_stream_initbuf;
+ pme->mem_cam->put_buf = mm_stream_deinitbuf;
+ pme->mem_cam->user_data = pme;
+
+ //pme->cam->ops->set_parm(pme->cam->camera_handle,MM_CAMERA_PARM_CH_INTERFACE, &value);
+ //pme->cam->ops->get_parm(pme->cam->camera_handle,MM_CAMERA_PARM_CH_INTERFACE, &rdi_mode);
+
+ pme->stream[MM_CAMERA_RDI].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
+ mm_app_rdi_notify_cb,pme,
+ MM_CAMERA_RDI, 0);
+
+ if (!pme->stream[MM_CAMERA_RDI].id) {
+ CDBG_ERROR("%s:Add RDI error =%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+ CDBG("%s :Add RDI stream is successfull stream ID = %d",__func__,pme->stream[MM_CAMERA_RDI].id);
+
+ mm_app_set_rdi_fmt(cam_id,&pme->stream[MM_CAMERA_RDI].str_config.fmt);
+ pme->stream[MM_CAMERA_RDI].str_config.need_stream_on = 1;
+ pme->stream[MM_CAMERA_RDI].str_config.num_of_bufs = 7;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_RDI].id,
+ &pme->stream[MM_CAMERA_RDI].str_config))) {
+ CDBG_ERROR("%s:RDI streaming err=%d\n", __func__, rc);
+ goto end;
+ }
+#if 0
+ if (rdi_mode == STREAM_IMAGE_AND_RAW) {
+ pme->stream[MM_CAMERA_PREVIEW].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
+ mm_app_preview_notify_cb,pme,
+ MM_CAMERA_PREVIEW, 0);
+
+ if (!pme->stream[MM_CAMERA_PREVIEW].id) {
+ CDBG_ERROR("%s:Add stream preview error =%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ CDBG("%s :Add stream is successfull stream ID = %d",__func__,pme->stream[MM_CAMERA_PREVIEW].id);
+
+ mm_app_set_preview_fmt(cam_id,&pme->stream[MM_CAMERA_PREVIEW].str_config.fmt);
+ pme->stream[MM_CAMERA_PREVIEW].str_config.need_stream_on = 1;
+ pme->stream[MM_CAMERA_PREVIEW].str_config.num_of_bufs = PREVIEW_BUF_NUM;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_PREVIEW].id,
+ &pme->stream[MM_CAMERA_PREVIEW].str_config))) {
+ CDBG_ERROR("%s:preview streaming err=%d\n", __func__, rc);
+ goto end;
+ }
+ }
+#endif
+ end:
+ return rc;
+}
+
+int mm_app_unprepare_rdi(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ return rc;
+}
+
+int mm_app_streamon_rdi(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ int num_of_streams;
+
+ /*if(rdi_mode == STREAM_IMAGE_AND_RAW){
+ num_of_streams = 2;
+ stream[0] = pme->stream[MM_CAMERA_RDI].id;
+ stream[1] = pme->stream[MM_CAMERA_PREVIEW].id;
+ }else */{
+ num_of_streams = 1;
+ stream[0] = pme->stream[MM_CAMERA_RDI].id;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->start_streams(pme->cam->camera_handle,pme->ch_id,num_of_streams,&stream))) {
+ CDBG_ERROR("%s : Start RDI Stream preview Error",__func__);
+ goto end;
+ }
+ pme->cam_state = CAMERA_STATE_RDI;
+ end:
+ CDBG("%s: X rc = %d",__func__,rc);
+ return rc;
+}
+
+int mm_app_start_rdi(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ int op_mode = 0;
+
+ CDBG("pme = %p, pme->cam =%p, pme->cam->camera_handle = %d",
+ pme,pme->cam,pme->cam->camera_handle);
+
+ if (pme->cam_state == CAMERA_STATE_RDI) {
+ return rc;
+ }
+
+ if (MM_CAMERA_OK != (rc = mm_app_prepare_rdi(cam_id))) {
+ CDBG_ERROR("%s:Prepare RDI failed rc=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = mm_app_streamon_rdi(cam_id))) {
+ CDBG_ERROR("%s:Stream On RDI failed rc=%d\n", __func__, rc);
+ goto end;
+ }
+
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+static int mm_app_streamoff_rdi(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+ int num_of_streams;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ /*if(rdi_mode == STREAM_IMAGE_AND_RAW){
+ num_of_streams = 2;
+ stream[0] = pme->stream[MM_CAMERA_RDI].id;
+ stream[1] = pme->stream[MM_CAMERA_PREVIEW].id;
+ }else*/{
+ num_of_streams = 1;
+ stream[0] = pme->stream[MM_CAMERA_RDI].id;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->stop_streams(pme->cam->camera_handle,pme->ch_id,num_of_streams,&stream))) {
+ CDBG_ERROR("%s : RDI Stream off Error",__func__);
+ goto end;
+ }
+
+ /*if(rdi_mode == STREAM_IMAGE_AND_RAW) {
+ if(MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_PREVIEW].id)))
+ {
+ CDBG_ERROR("%s : Delete Preview error",__func__);
+ goto end;
+ }
+ }*/
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_RDI].id))) {
+ CDBG_ERROR("%s : Delete Stream RDI error",__func__);
+ goto end;
+ }
+ CDBG("del_stream successfull");
+ pme->cam_state = CAMERA_STATE_OPEN;
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+
+ return rc;
+}
+
+int startRdi(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s: Start Preview",__func__);
+
+ if (pme->cam_mode == ZSL_MODE || pme->cam_mode == RECORDER_MODE || pme->cam_mode == CAMERA_MODE) {
+ switch (pme->cam_state) {
+ case CAMERA_STATE_RECORD:
+ if (MM_CAMERA_OK != mm_app_stop_video(cam_id)) {
+ CDBG_ERROR("%s:Cannot stop video err=%d\n", __func__, rc);
+ return -1;
+ }
+ case CAMERA_STATE_PREVIEW:
+ if (MM_CAMERA_OK != mm_app_open_rdi(cam_id)) {
+ CDBG_ERROR("%s: Cannot switch to camera mode=%d\n", __func__);
+ return -1;
+ }
+ case CAMERA_STATE_SNAPSHOT:
+ default:
+ break;
+ }
+ }
+ mm_app_start_rdi(cam_id);
+ return rc;
+}
+
+int stopRdi(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ mm_app_streamoff_rdi(cam_id);
+
+ end:
+ return rc;
+}
+
+
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_snapshot.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_snapshot.c
new file mode 100644
index 0000000..4b54197
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_snapshot.c
@@ -0,0 +1,1402 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <stdbool.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_qcamera_app.h"
+
+#define BUFF_SIZE_128 128
+
+//static mm_camera_ch_data_buf_t *mCurrentFrameEncoded;
+static int JpegOffset = 0;
+static int raw_snapshot_cnt = 0;
+static int snapshot_cnt = 0;
+static pthread_mutex_t g_s_mutex;
+static int g_status = 0;
+static pthread_cond_t g_s_cond_v;
+
+
+static void mm_app_snapshot_done()
+{
+ pthread_mutex_lock(&g_s_mutex);
+ g_status = true;
+ pthread_cond_signal(&g_s_cond_v);
+ pthread_mutex_unlock(&g_s_mutex);
+}
+
+static int mm_app_dump_snapshot_frame(struct msm_frame *frame,
+ uint32_t len, int is_main, int is_raw)
+{
+ char bufp[BUFF_SIZE_128];
+ int file_fdp;
+ int rc = 0;
+
+ if (is_raw) {
+ snprintf(bufp, BUFF_SIZE_128, "/data/main_raw_%d.yuv", raw_snapshot_cnt);
+ } else {
+ if (is_main) {
+ snprintf(bufp, BUFF_SIZE_128, "/data/main_%d.yuv", snapshot_cnt);
+ } else {
+ snprintf(bufp, BUFF_SIZE_128, "/data/thumb_%d.yuv", snapshot_cnt);
+ }
+ }
+
+ file_fdp = open(bufp, O_RDWR | O_CREAT, 0777);
+
+ if (file_fdp < 0) {
+ CDBG("cannot open file %s\n", bufp);
+ rc = -1;
+ goto end;
+ }
+ CDBG("%s:dump snapshot frame to '%s'\n", __func__, bufp);
+ write(file_fdp,
+ (const void *)frame->buffer, len);
+ close(file_fdp);
+ end:
+ return rc;
+}
+
+
+static void mm_app_dump_jpeg_frame(const void * data, uint32_t size, char* name, char* ext, int index)
+{
+ char buf[32];
+ int file_fd;
+ if ( data != NULL) {
+        snprintf(buf, sizeof(buf), "/data/%s_%d.%s", name, index, ext);
+        CDBG("%s size =%d", buf, size);
+        file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+        if (file_fd < 0) return;
+        write(file_fd, data, size);
+        close(file_fd);
+ }
+}
+
+static int mm_app_set_thumbnail_fmt(int cam_id,mm_camera_image_fmt_t *fmt)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ fmt->meta_header = MM_CAMEAR_META_DATA_TYPE_DEF;
+ fmt->fmt = pme->dim.thumb_format;
+ fmt->width = pme->dim.thumbnail_width;
+ fmt->height = pme->dim.thumbnail_height;
+ CDBG("Thumbnail Dimension = %dX%d",fmt->width,fmt->height);
+ return rc;
+}
+
+int mm_app_set_snapshot_fmt(int cam_id,mm_camera_image_fmt_t *fmt)
+{
+
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ fmt->meta_header = MM_CAMEAR_META_DATA_TYPE_DEF;
+ fmt->fmt = pme->dim.main_img_format;
+ fmt->width = pme->dim.picture_width;
+ fmt->height = pme->dim.picture_height;
+ CDBG("Snapshot Dimension = %dX%d",fmt->width,fmt->height);
+ return rc;
+}
+
+int mm_app_set_live_snapshot_fmt(int cam_id,mm_camera_image_fmt_t *fmt)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ fmt->meta_header = MM_CAMEAR_META_DATA_TYPE_DEF;
+ fmt->fmt = pme->dim.enc_format;
+ fmt->width = pme->dim.video_width;
+ fmt->height = pme->dim.video_height;
+ CDBG("Livesnapshot Dimension = %dX%d",fmt->width,fmt->height);
+ return rc;
+}
+
+int mm_app_set_raw_snapshot_fmt(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+#if 0
+ /* now we hard code format */
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ mm_camera_ch_image_fmt_parm_t fmt;
+
+ CDBG("%s: BEGIN\n", __func__);
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_RAW;
+ fmt.def.fmt = CAMERA_BAYER_SBGGR10;
+ fmt.def.dim.width = pme->dim.raw_picture_width;
+ fmt.def.dim.height = pme->dim.raw_picture_height;
+ rc = pme->cam->cfg->set_parm(pme->cam, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s:set raw snapshot format err=%d\n", __func__, rc);
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+#endif
+ return rc;
+}
+
+int mm_app_prepare_raw_snapshot_buf(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+#if 0
+ int j;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ mm_camera_reg_buf_t reg_buf;
+ uint32_t y_off, cbcr_off;
+ uint8_t num_planes_main;
+ uint32_t planes_main[VIDEO_MAX_PLANES];
+
+ CDBG("%s: BEGIN, raw_w=%d, raw_h=%d\n",
+ __func__, pme->dim.raw_picture_width, pme->dim.raw_picture_height);
+    memset(&reg_buf, 0, sizeof(reg_buf));
+ reg_buf.def.buf.mp = malloc(sizeof(mm_camera_mp_buf_t));
+ if (!reg_buf.def.buf.mp) {
+ CDBG_ERROR("%s Error allocating memory for mplanar struct ", __func__);
+ rc = -MM_CAMERA_E_NO_MEMORY;
+ goto end;
+ }
+
+ // setup main buffer
+ memset(&pme->raw_snapshot_buf, 0, sizeof(pme->raw_snapshot_buf));
+ pme->raw_snapshot_buf.num = 1;
+ pme->raw_snapshot_buf.frame_len =
+ my_cam_app.hal_lib.mm_camera_get_msm_frame_len(CAMERA_BAYER_SBGGR10,
+ CAMERA_MODE_2D,
+ pme->dim.raw_picture_width,
+ pme->dim.raw_picture_height,
+ OUTPUT_TYPE_S,
+ &num_planes_main,
+ planes_main);
+#ifdef USE_ION
+ pme->raw_snapshot_buf.frame[0].ion_alloc.len = pme->raw_snapshot_buf.frame_len;
+ pme->raw_snapshot_buf.frame[0].ion_alloc.flags = (0x1 << CAMERA_ION_HEAP_ID);
+ pme->raw_snapshot_buf.frame[0].ion_alloc.align = 4096;
+#endif
+ pme->raw_snapshot_buf.frame[0].buffer = (unsigned long) my_cam_app.hal_lib.mm_camera_do_mmap(
+ pme->raw_snapshot_buf.frame_len, &pme->raw_snapshot_buf.frame[0].fd);
+
+
+ if (!pme->raw_snapshot_buf.frame[0].buffer) {
+ CDBG("%s:no mem for snapshot buf\n", __func__);
+ rc = -MM_CAMERA_E_NO_MEMORY;
+ goto end;
+ }
+ pme->raw_snapshot_buf.frame[0].path = OUTPUT_TYPE_S;
+ pme->preview_buf.frame[0].y_off = 0;
+ pme->raw_snapshot_buf.frame[0].cbcr_off = planes_main[0];
+
+ /*setup registration buffer*/
+ reg_buf.def.buf.mp[0].frame = pme->raw_snapshot_buf.frame[0];
+ reg_buf.def.buf.mp[0].frame_offset = 0;
+ reg_buf.def.buf.mp[0].num_planes = num_planes_main;
+
+ reg_buf.def.buf.mp[0].planes[0].length = planes_main[0];
+ reg_buf.def.buf.mp[0].planes[0].m.userptr = pme->raw_snapshot_buf.frame[0].fd;
+ reg_buf.def.buf.mp[0].planes[0].data_offset = 0;
+ reg_buf.def.buf.mp[0].planes[0].reserved[0] = reg_buf.def.buf.mp[0].frame_offset;
+ for (j = 1; j < num_planes_main; j++) {
+ reg_buf.def.buf.mp[0].planes[j].length = planes_main[j];
+ reg_buf.def.buf.mp[0].planes[j].m.userptr = pme->raw_snapshot_buf.frame[0].fd;
+ reg_buf.def.buf.mp[0].planes[j].data_offset = 0;
+ reg_buf.def.buf.mp[0].planes[j].reserved[0] = reg_buf.def.buf.mp[0].planes[j-1].reserved[0] +
+ reg_buf.def.buf.mp[0].planes[j-1].length;
+ }
+
+ reg_buf.ch_type = MM_CAMERA_CH_RAW;
+ reg_buf.def.num = pme->raw_snapshot_buf.num;
+    rc = pme->cam->cfg->prepare_buf(pme->cam, &reg_buf);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s:reg snapshot buf err=%d\n", __func__, rc);
+ goto end;
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+#endif
+ return rc;
+}
+
+static int mm_app_unprepare_raw_snapshot_buf(int cam_id)
+{
+ int i, rc = MM_CAMERA_OK;
+#if 0
+ /* now we hard code format */
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s: BEGIN\n", __func__);
+ rc = pme->cam->cfg->unprepare_buf(pme->cam, MM_CAMERA_CH_RAW);
+ rc = my_cam_app.hal_lib.mm_camera_do_munmap(pme->raw_snapshot_buf.frame[0].fd,
+ (void *)pme->raw_snapshot_buf.frame[0].buffer,
+ pme->raw_snapshot_buf.frame_len);
+ rc = my_cam_app.hal_lib.mm_camera_do_munmap(pme->jpeg_buf.frame[0].fd,
+ (void *)pme->jpeg_buf.frame[0].buffer,
+ pme->jpeg_buf.frame_len);
+ /* zero out the buf stuct */
+ memset(&pme->raw_snapshot_buf, 0, sizeof(pme->raw_snapshot_buf));
+ memset(&pme->jpeg_buf, 0, sizeof(pme->jpeg_buf));
+
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+#endif
+ return rc;
+}
+
+#ifndef DISABLE_JPEG_ENCODING
+/* Once we give frame for encoding, we get encoded jpeg image
+ fragments by fragment. We'll need to store them in a buffer
+ to form complete JPEG image */
+static void snapshot_jpeg_fragment_cb(uint8_t *ptr,
+ uint32_t size,
+ void *user_data)
+{
+#if 0
+ mm_camera_app_obj_t *pme = user_data;
+
+ CDBG("%s: E",__func__);
+ if (pme) {
+ memcpy((uint8_t *)((uint32_t)pme->jpeg_buf.frame[0].buffer + JpegOffset), ptr, size);
+ JpegOffset += size;
+ }
+ CDBG("%s: X",__func__);
+#endif
+}
+#endif
+/* This callback is received once the complete JPEG encoding is done */
+static void snapshot_raw_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+
+ int rc;
+ int i = 0;
+ mm_camera_buf_def_t *main_frame = NULL;
+ mm_camera_buf_def_t *thumb_frame = NULL;
+ mm_camera_app_obj_t *pme = NULL;
+ CDBG("%s: BEGIN\n", __func__);
+
+ pme = (mm_camera_app_obj_t *)user_data;
+
+ CDBG("%s : total streams = %d",__func__,bufs->num_bufs);
+ main_frame = bufs->bufs[0];
+ thumb_frame = bufs->bufs[1];
+
+ CDBG("mainframe frame_idx = %d fd = %d frame length = %d",main_frame->frame_idx,main_frame->fd,main_frame->frame_len);
+    CDBG("thumbnail frame_idx = %d fd = %d frame length = %d",thumb_frame->frame_idx,thumb_frame->fd,thumb_frame->frame_len);
+
+ //dumpFrameToFile(main_frame->frame,pme->dim.picture_width,pme->dim.picture_height,"main", 1);
+ //dumpFrameToFile(thumb_frame->frame,pme->dim.thumbnail_width,pme->dim.thumbnail_height,"thumb", 1);
+
+ dumpFrameToFile(main_frame,pme->dim.picture_width,pme->dim.picture_height,"main", 1);
+ dumpFrameToFile(thumb_frame,pme->dim.thumbnail_width,pme->dim.thumbnail_height,"thumb", 1);
+
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,main_frame)) {
+        CDBG_ERROR("%s: Failed in main frame Qbuf\n", __func__);
+ }
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,thumb_frame)) {
+ CDBG_ERROR("%s: Failed in thumbnail Qbuf\n", __func__);
+ }
+
+ mm_app_snapshot_done();
+ CDBG("%s: END\n", __func__);
+
+
+}
+
+#ifndef DISABLE_JPEG_ENCODING
+static int encodeData(mm_camera_super_buf_t* recvd_frame,
+ int frame_len,
+ int enqueued,
+ mm_camera_app_obj_t *pme)
+{
+ int ret = -1;
+#if 0
+
+ cam_ctrl_dimension_t dimension;
+ struct msm_frame *postviewframe;
+ struct msm_frame *mainframe;
+ common_crop_t crop;
+ cam_point_t main_crop_offset;
+ cam_point_t thumb_crop_offset;
+ int width, height;
+ uint8_t *thumbnail_buf;
+ uint32_t thumbnail_fd;
+
+ omx_jpeg_encode_params encode_params;
+ postviewframe = recvd_frame->snapshot.thumbnail.frame;
+ mainframe = recvd_frame->snapshot.main.frame;
+ dimension.orig_picture_dx = pme->dim.picture_width;
+ dimension.orig_picture_dy = pme->dim.picture_height;
+ dimension.thumbnail_width = pme->dim.ui_thumbnail_width;
+ dimension.thumbnail_height = pme->dim.ui_thumbnail_height;
+ dimension.main_img_format = pme->dim.main_img_format;
+ dimension.thumb_format = pme->dim.thumb_format;
+
+ CDBG("Setting callbacks, initializing encoder and start encoding.");
+ my_cam_app.hal_lib.set_callbacks(snapshot_jpeg_fragment_cb, snapshot_jpeg_cb, pme,
+ (void *)pme->jpeg_buf.frame[0].buffer, &JpegOffset);
+ my_cam_app.hal_lib.omxJpegStart();
+ my_cam_app.hal_lib.mm_jpeg_encoder_setMainImageQuality(85);
+
+ /*TBD: Pass 0 as cropinfo for now as v4l2 doesn't provide
+ cropinfo. It'll be changed later.*/
+ memset(&crop,0,sizeof(common_crop_t));
+ memset(&main_crop_offset,0,sizeof(cam_point_t));
+ memset(&thumb_crop_offset,0,sizeof(cam_point_t));
+
+ /*Fill in the encode parameters*/
+ encode_params.dimension = (const cam_ctrl_dimension_t *)&dimension;
+ encode_params.thumbnail_buf = (uint8_t *)postviewframe->buffer;
+ encode_params.thumbnail_fd = postviewframe->fd;
+ encode_params.thumbnail_offset = postviewframe->phy_offset;
+ encode_params.snapshot_buf = (uint8_t *)mainframe->buffer;
+ encode_params.snapshot_fd = mainframe->fd;
+ encode_params.snapshot_offset = mainframe->phy_offset;
+ encode_params.scaling_params = &crop;
+ encode_params.exif_data = NULL;
+ encode_params.exif_numEntries = 0;
+ encode_params.a_cbcroffset = -1;
+ encode_params.main_crop_offset = &main_crop_offset;
+ encode_params.thumb_crop_offset = &thumb_crop_offset;
+
+ if (!my_cam_app.hal_lib.omxJpegEncode(&encode_params)) {
+ CDBG_ERROR("%s: Failure! JPEG encoder returned error.", __func__);
+ ret = -1;
+ goto end;
+ }
+
+ /* Save the pointer to the frame sent for encoding. we'll need it to
+ tell kernel that we are done with the frame.*/
+ mCurrentFrameEncoded = recvd_frame;
+
+ end:
+ CDBG("%s: X", __func__);
+#endif
+ return ret;
+}
+
+static int encodeDisplayAndSave(mm_camera_super_buf_t* recvd_frame,
+ int enqueued, mm_camera_app_obj_t *pme)
+{
+ int ret = -1;
+#if 0
+
+
+ CDBG("%s: Send frame for encoding", __func__);
+ ret = encodeData(recvd_frame, pme->snapshot_buf.frame_len,
+ enqueued, pme);
+ if (!ret) {
+ CDBG_ERROR("%s: Failure configuring JPEG encoder", __func__);
+ }
+
+ LOGD("%s: X", __func__);
+#endif
+ return ret;
+}
+#endif //DISABLE_JPEG_ENCODING
+static void mm_app_snapshot_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+#if 0
+ mm_camera_app_obj_t *pme = user_data;
+ int rc;
+
+ CDBG("%s: BEGIN\n", __func__);
+ snapshot_cnt++;
+ mm_app_dump_snapshot_frame(bufs->snapshot.main.frame, pme->snapshot_buf.frame_len, TRUE, 0);
+ mm_app_dump_snapshot_frame(bufs->snapshot.thumbnail.frame, pme->thumbnail_buf.frame_len, FALSE, 0);
+#ifndef DISABLE_JPEG_ENCODING
+ /* The recvd_frame structre we receive from lower library is a local
+variable. So we'll need to save this structure so that we won't
+be later pointing to garbage data when that variable goes out of
+scope */
+ mm_camera_ch_data_buf_t* frame =
+ (mm_camera_ch_data_buf_t *)malloc(sizeof(mm_camera_ch_data_buf_t));
+ if (frame == NULL) {
+ CDBG_ERROR("%s: Error allocating memory to save received_frame structure.", __func__);
+ goto error1;
+ }
+ memcpy(frame, bufs, sizeof(mm_camera_ch_data_buf_t));
+ rc = encodeDisplayAndSave(frame, 0, pme);
+ if (!rc) {
+ CDBG_ERROR("%s: Error encoding buffer.", __func__);
+ goto error;
+ }
+#endif //DISABLE_JPEG_ENCODING
+ /* return buffer back for taking next snapshot */
+ pme->cam->evt->buf_done(pme->cam, bufs);
+ mm_app_snapshot_done();
+/*
+ CDBG("%s: calling mm_app_snapshot_done()\n", __func__);
+ mm_app_snapshot_done();
+*/
+ CDBG("%s: END\n", __func__);
+ return;
+ error:
+ /*if (frame != NULL)
+ free(frame);*/
+ error1:
+ pme->cam->evt->buf_done(pme->cam, bufs);
+ mm_app_snapshot_done();
+#endif
+ return;
+}
+
+static void mm_app_raw_snapshot_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+#if 0
+ mm_camera_app_obj_t *pme = user_data;
+ static int loop = 0;
+
+ CDBG("%s: BEGIN\n", __func__);
+ raw_snapshot_cnt++;
+ mm_app_dump_snapshot_frame(bufs->def.frame, pme->raw_snapshot_buf.frame_len, TRUE, 1);
+ /* return buffer back for taking next snapshot */
+ pme->cam->evt->buf_done(pme->cam, bufs);
+ CDBG("%s: calling mm_app_snapshot_done()\n", __func__);
+ mm_app_snapshot_done();
+ CDBG("%s: END\n", __func__);
+#endif
+}
+static int mm_app_reg_snapshot_data_cb(int cam_id, int is_reg)
+{
+ int rc = MM_CAMERA_OK;
+#if 0
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+
+ CDBG("%s: BEGIN\n", __func__);
+ if (is_reg) {
+ rc = pme->cam->evt->register_buf_notify(pme->cam,
+ MM_CAMERA_CH_SNAPSHOT,
+ mm_app_snapshot_notify_cb,
+ MM_CAMERA_REG_BUF_CB_INFINITE, 0,
+ pme);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s:register snapshot data notify cb err=%d\n",
+ __func__, rc);
+ goto end;
+ }
+ } else {
+ rc = pme->cam->evt->register_buf_notify(pme->cam,
+ MM_CAMERA_CH_SNAPSHOT,
+ NULL,
+ (mm_camera_register_buf_cb_type_t)NULL,
+ 0, pme);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s:unregister snapshot data notify cb err=%d\n",
+ __func__, rc);
+ goto end;
+ }
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+#endif
+ return rc;
+}
+static int mm_app_reg_raw_snapshot_data_cb(int cam_id, int is_reg)
+{
+ int rc = MM_CAMERA_OK;
+#if 0
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+
+ CDBG("%s: BEGIN\n", __func__);
+ if (is_reg) {
+ rc = pme->cam->evt->register_buf_notify(pme->cam,
+ MM_CAMERA_CH_RAW,
+ mm_app_raw_snapshot_notify_cb,
+ MM_CAMERA_REG_BUF_CB_INFINITE, 0,
+ pme);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s:register raw snapshot data notify cb err=%d\n",
+ __func__, rc);
+ goto end;
+ }
+ } else {
+ rc = pme->cam->evt->register_buf_notify(pme->cam,
+ MM_CAMERA_CH_RAW,
+ NULL,
+ (mm_camera_register_buf_cb_type_t)NULL, 0, pme);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s:unregister raw snapshot data notify cb err=%d\n",
+ __func__, rc);
+ goto end;
+ }
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+#endif
+ return rc;
+}
+
+int mm_app_add_snapshot_stream(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
+ NULL,pme,
+ MM_CAMERA_SNAPSHOT_MAIN, 0);
+
+ CDBG("Add Snapshot main is successfull stream ID = %d",pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id);
+ if (!pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id) {
+        CDBG_ERROR("%s: add_stream for snapshot main failed\n", __func__);
+ rc = -1;
+ goto end;
+ }
+
+ pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
+ NULL,pme,
+ MM_CAMERA_SNAPSHOT_THUMBNAIL, 0);
+ if (!pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].id) {
+        CDBG_ERROR("%s: add_stream for snapshot thumbnail failed\n", __func__);
+ rc = -1;
+ goto end;
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+void mm_app_set_snapshot_mode(int cam_id,int op_mode)
+{
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ pme->cam->ops->set_parm(pme->cam->camera_handle,MM_CAMERA_PARM_OP_MODE, &op_mode);
+
+}
+
+int mm_app_config_snapshot_format(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ mm_app_set_snapshot_fmt(cam_id,&pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.fmt);
+
+ mm_app_set_thumbnail_fmt(cam_id,&pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].str_config.fmt);
+
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.need_stream_on = 1;
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.num_of_bufs = 1;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id,
+ &pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config))) {
+        CDBG_ERROR("%s: config_stream for snapshot main err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].str_config.need_stream_on = 1;
+ pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].str_config.num_of_bufs = 1;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].id,
+ &pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].str_config))) {
+        CDBG_ERROR("%s: config_stream for snapshot thumbnail err=%d\n", __func__, rc);
+ goto end;
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+
+}
+
+int mm_app_streamon_snapshot(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+ mm_camera_bundle_attr_t attr;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id;
+ stream[1] = pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].id;
+
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ attr.burst_num = 1;
+ attr.look_back = 2;
+ attr.post_frame_skip = 0;
+ attr.water_mark = 2;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->init_stream_bundle(
+ pme->cam->camera_handle,pme->ch_id,snapshot_raw_cb,pme,&attr,2,stream))) {
+ CDBG_ERROR("%s:init_stream_bundle err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->start_streams(pme->cam->camera_handle,pme->ch_id,
+ 2, stream))) {
+ CDBG_ERROR("%s:start_streams err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc =pme->cam->ops->request_super_buf(pme->cam->camera_handle,pme->ch_id))) {
+ CDBG_ERROR("%s:request_super_buf err=%d\n", __func__, rc);
+ goto end;
+ }
+ pme->cam_state = CAMERA_STATE_SNAPSHOT;
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_streamoff_snapshot(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id;
+ stream[1] = pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].id;
+
+    if (MM_CAMERA_OK != (rc = pme->cam->ops->stop_streams(pme->cam->camera_handle,pme->ch_id,2,stream))) {
+ CDBG_ERROR("%s : Snapshot Stream off Error",__func__);
+ goto end;
+ }
+ CDBG("Stop snapshot main successfull");
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->destroy_stream_bundle(pme->cam->camera_handle,pme->ch_id))) {
+ CDBG_ERROR("%s : Snapshot destroy_stream_bundle Error",__func__);
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id))) {
+ CDBG_ERROR("%s : Snapshot main image del_stream Error",__func__);
+ goto end;
+ }
+ CDBG("del_stream successfull");
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].id))) {
+        CDBG_ERROR("%s : Snapshot thumbnail image del_stream Error",__func__);
+ goto end;
+ }
+ CDBG("del_stream successfull");
+
+ end:
+ return rc;
+}
+
+int mm_app_start_snapshot(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+ int op_mode = 0;
+
+ mm_camera_bundle_attr_t attr;
+
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if (MM_CAMERA_OK != mm_app_stop_preview(cam_id)) {
+ CDBG_ERROR("%s: Stop preview Failed cam_id=%d\n",__func__,cam_id);
+ return -1;
+ }
+
+ op_mode = MM_CAMERA_OP_MODE_CAPTURE;
+ mm_app_set_snapshot_mode(cam_id,op_mode);
+
+ pme->cam->ops->prepare_snapshot(pme->cam->camera_handle,pme->ch_id,0);
+
+ if (MM_CAMERA_OK != (rc = mm_app_add_snapshot_stream(cam_id))) {
+ CDBG_ERROR("%s : Add Snapshot stream err",__func__);
+ }
+
+ if (MM_CAMERA_OK != (rc = mm_app_config_snapshot_format(cam_id))) {
+ CDBG_ERROR("%s : Config Snapshot stream err",__func__);
+ }
+
+ if (MM_CAMERA_OK != (rc = mm_app_streamon_snapshot(cam_id))) {
+ CDBG_ERROR("%s : Stream on Snapshot stream err",__func__);
+ }
+
+#if 0
+ /*start OMX Jpeg encoder*/
+#ifndef DISABLE_JPEG_ENCODING
+ my_cam_app.hal_lib.omxJpegOpen();
+#endif
+
+#endif
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+
+ return rc;
+}
+
+
+int mm_app_stop_snapshot(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[2];
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id;
+ stream[1] = pme->stream[MM_CAMERA_SNAPSHOT_THUMBNAIL].id;
+
+ if (MM_CAMERA_OK != (rc = mm_app_streamoff_snapshot(cam_id))) {
+ CDBG_ERROR("%s : Stream off Snapshot stream err",__func__);
+ }
+ pme->cam_state = CAMERA_STATE_OPEN;
+#if 0
+#ifndef DISABLE_JPEG_ENCODING
+ my_cam_app.hal_lib.omxJpegClose();
+#endif
+#endif
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+
+ return rc;
+}
+
+int mm_app_start_raw_snapshot(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+#if 0
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ mm_camera_channel_attr_t attr;
+
+
+ attr.type = MM_CAMERA_CH_ATTR_RAW_STREAMING_TYPE;
+ attr.raw_streaming_mode = MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE;
+
+ if (MM_CAMERA_OK != (rc = mm_app_set_op_mode(cam_id, MM_CAMERA_OP_MODE_CAPTURE))) {
+ CDBG("%s:mm_app_set_op_mode(op_mode=%d) err=%d\n", __func__,
+ MM_CAMERA_OP_MODE_CAPTURE, rc);
+ goto end;
+ }
+ if (MM_CAMERA_OK != (rc = mm_app_open_ch(cam_id, MM_CAMERA_CH_RAW))) {
+ CDBG("%s:open raw snapshot channel err=%d\n", __func__, rc);
+ goto end;
+ }
+ if (MM_CAMERA_OK != (rc = mm_app_set_raw_snapshot_fmt(cam_id))) {
+ CDBG("%s:set raw snapshot format err=%d\n", __func__, rc);
+ goto end;
+ }
+ mm_app_get_dim(cam_id, NULL);
+ if (MM_CAMERA_OK != (rc = mm_app_prepare_raw_snapshot_buf(cam_id))) {
+ CDBG("%s:reg raw snapshot buf err=%d\n", __func__, rc);
+ goto end;
+ }
+ if (MM_CAMERA_OK != (rc = mm_app_reg_raw_snapshot_data_cb(cam_id, TRUE))) {
+ CDBG("%s:reg raw snapshot data cb err=%d\n", __func__, rc);
+ }
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->ch_set_attr(pme->cam, MM_CAMERA_CH_RAW, &attr))) {
+ CDBG("%s:set raw capture attribute err=%d\n", __func__, rc);
+ goto end;
+ }
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->action(pme->cam, TRUE, MM_CAMERA_OPS_RAW, 0))) {
+ CDBG("%s:snapshot streaming err=%d\n", __func__, rc);
+ goto end;
+ }
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+#endif
+ return rc;
+}
+
+int mm_app_stop_raw_snapshot(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+#if 0
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+
+ CDBG("%s: BEGIN\n", __func__);
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->action(pme->cam, FALSE, MM_CAMERA_OPS_RAW, 0))) {
+ CDBG("%s:stop raw snapshot streaming err=%d\n", __func__, rc);
+ goto end;
+ }
+ if (MM_CAMERA_OK != (rc = mm_app_unprepare_raw_snapshot_buf(cam_id))) {
+ CDBG("%s:mm_app_unprepare_raw_snapshot_buf err=%d\n", __func__, rc);
+ return rc;
+ }
+ if (MM_CAMERA_OK != (rc = mm_app_reg_raw_snapshot_data_cb(cam_id, FALSE))) {
+ CDBG("%s:mm_app_reg_raw_snapshot_data_cb err=%d\n", __func__, rc);
+ return rc;
+ }
+ mm_app_close_ch(cam_id, MM_CAMERA_CH_RAW);
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+#endif
+ return rc;
+}
+
+static void mm_app_snapshot_wait(int cam_id)
+{
+ pthread_mutex_lock(&g_s_mutex);
+    while (false == g_status) {
+        pthread_cond_wait(&g_s_cond_v, &g_s_mutex);
+    }
+    g_status = false;
+ pthread_mutex_unlock(&g_s_mutex);
+}
+
+#if 0
+int mm_stream_deinit_thumbnail_buf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data, uint8_t num_bufs,
+ mm_camera_buf_def_t *bufs)
+{
+ int i, rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ for (i = 0; i < num_bufs; i++) {
+ rc = my_cam_app.hal_lib.mm_camera_do_munmap_ion (pme->ionfd, &(pme->thumbnail_buf.frame[i].fd_data),
+ (void *)pme->thumbnail_buf.frame[i].buffer, pme->thumbnail_buf.frame_len);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s: mm_camera_do_munmap err, pmem_fd = %d, rc = %d",
+ __func__, bufs[i].fd, rc);
+ }
+ }
+ return rc;
+}
+
+int mm_stream_deinit_main_buf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data, uint8_t num_bufs,
+ mm_camera_buf_def_t *bufs)
+{
+ int i, rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ for (i = 0; i < num_bufs; i++) {
+ rc = my_cam_app.hal_lib.mm_camera_do_munmap_ion (pme->ionfd, &(pme->snapshot_buf.frame[i].fd_data),
+ (void *)pme->snapshot_buf.frame[i].buffer, pme->snapshot_buf.frame_len);
+ if (rc != MM_CAMERA_OK) {
+ CDBG("%s: mm_camera_do_munmap err, pmem_fd = %d, rc = %d",
+ __func__, bufs[i].fd, rc);
+ }
+ }
+ return rc;
+}
+
+int mm_stream_init_main_buf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data,
+ mm_camera_frame_len_offset *frame_offset_info,
+ uint8_t num_bufs,
+ uint8_t *initial_reg_flag,
+ mm_camera_buf_def_t *bufs)
+{
+ int i,j,num_planes, frame_len, y_off, cbcr_off;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ uint32_t pmem_addr = 0;
+
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ num_planes = frame_offset_info->num_planes;
+ for ( i = 0; i < num_planes; i++) {
+ planes[i] = frame_offset_info->mp[i].len;
+ }
+
+ frame_len = frame_offset_info->frame_len;
+ y_off = frame_offset_info->mp[0].offset;
+ cbcr_off = frame_offset_info->mp[1].offset;
+
+ CDBG("Allocating main image Memory for %d buffers frame_len = %d",num_bufs,frame_offset_info->frame_len);
+
+ for (i = 0; i < num_bufs ; i++) {
+ int j;
+ if (pme->cam_mode != RECORDER_MODE || pme->fullSizeSnapshot) {
+ pme->snapshot_buf.reg[i] = 1;
+ initial_reg_flag[i] = 1;
+ } else {
+ pme->snapshot_buf.reg[i] = 0;
+ initial_reg_flag[i] = 0;
+ }
+
+ pme->snapshot_buf.frame_len = frame_len;
+
+ pme->snapshot_buf.frame[i].ion_alloc.len = pme->snapshot_buf.frame_len;
+ pme->snapshot_buf.frame[i].ion_alloc.flags =
+ (0x1 << CAMERA_ION_HEAP_ID | 0x1 << ION_IOMMU_HEAP_ID);
+ pme->snapshot_buf.frame[i].ion_alloc.align = 4096;
+
+ pmem_addr = (unsigned long) my_cam_app.hal_lib.mm_camera_do_mmap_ion(pme->ionfd,
+ &(pme->snapshot_buf.frame[i].ion_alloc), &(pme->snapshot_buf.frame[i].fd_data),
+ &pme->snapshot_buf.frame[i].fd);
+
+ pme->snapshot_buf.frame[i].buffer = pmem_addr;
+ pme->snapshot_buf.frame[i].path = OUTPUT_TYPE_S;
+ pme->snapshot_buf.frame[i].y_off = 0;
+ pme->snapshot_buf.frame[i].cbcr_off = planes[0];
+ pme->snapshot_buf.frame[i].phy_offset = 0;
+
+ CDBG("Buffer allocated Successfully fd = %d",pme->snapshot_buf.frame[i].fd);
+
+ bufs[i].fd = pme->snapshot_buf.frame[i].fd;
+ //bufs[i].buffer = pmem_addr;
+ bufs[i].frame_len = pme->snapshot_buf.frame[i].ion_alloc.len;
+ bufs[i].num_planes = num_planes;
+
+ bufs[i].frame = &pme->snapshot_buf.frame[i];
+
+ /* Plane 0 needs to be set seperately. Set other planes
+ * in a loop. */
+ bufs[i].planes[0].length = planes[0];
+ bufs[i].planes[0].m.userptr = bufs[i].fd;
+ bufs[i].planes[0].data_offset = y_off;
+ bufs[i].planes[0].reserved[0] = 0;
+ //buf_def->buf.mp[i].frame_offset;
+ for (j = 1; j < num_planes; j++) {
+ bufs[i].planes[j].length = planes[j];
+ bufs[i].planes[j].m.userptr = bufs[i].fd;
+ bufs[i].planes[j].data_offset = cbcr_off;
+ bufs[i].planes[j].reserved[0] =
+ bufs[i].planes[j-1].reserved[0] +
+ bufs[i].planes[j-1].length;
+ }
+ }
+ return MM_CAMERA_OK;
+}
+
+int mm_stream_init_thumbnail_buf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data,
+ mm_camera_frame_len_offset *frame_offset_info,
+ uint8_t num_bufs,
+ uint8_t *initial_reg_flag,
+ mm_camera_buf_def_t *bufs)
+{
+ int i,j,num_planes, frame_len, y_off, cbcr_off;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ uint32_t pmem_addr = 0;
+
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ num_planes = frame_offset_info->num_planes;
+ for ( i = 0; i < num_planes; i++) {
+ planes[i] = frame_offset_info->mp[i].len;
+ }
+
+ frame_len = frame_offset_info->frame_len;
+ y_off = frame_offset_info->mp[0].offset;
+ cbcr_off = frame_offset_info->mp[1].offset;
+
+    CDBG("Allocating thumbnail image Memory for %d buffers frame_len = %d",num_bufs,frame_offset_info->frame_len);
+
+ for (i = 0; i < num_bufs ; i++) {
+ int j;
+ pme->thumbnail_buf.reg[i] = 1;
+ initial_reg_flag[i] = 1;
+
+ pme->thumbnail_buf.frame_len = frame_len;
+ pme->thumbnail_buf.frame[i].ion_alloc.len = pme->thumbnail_buf.frame_len;
+ pme->thumbnail_buf.frame[i].ion_alloc.flags =
+ (0x1 << CAMERA_ION_HEAP_ID | 0x1 << ION_IOMMU_HEAP_ID);
+ pme->thumbnail_buf.frame[i].ion_alloc.align = 4096;
+
+ pmem_addr = (unsigned long) my_cam_app.hal_lib.mm_camera_do_mmap_ion(pme->ionfd,
+ &(pme->thumbnail_buf.frame[i].ion_alloc), &(pme->thumbnail_buf.frame[i].fd_data),
+ &pme->thumbnail_buf.frame[i].fd);
+
+ pme->thumbnail_buf.frame[i].buffer = pmem_addr;
+ pme->thumbnail_buf.frame[i].path = OUTPUT_TYPE_S;
+ pme->thumbnail_buf.frame[i].y_off = 0;
+ pme->thumbnail_buf.frame[i].cbcr_off = planes[0];
+ pme->thumbnail_buf.frame[i].phy_offset = 0;
+
+ CDBG("Buffer allocated Successfully fd = %d",pme->thumbnail_buf.frame[i].fd);
+
+ bufs[i].fd = pme->thumbnail_buf.frame[i].fd;
+ //bufs[i].buffer = pmem_addr;
+ bufs[i].frame_len = pme->thumbnail_buf.frame[i].ion_alloc.len;
+ bufs[i].num_planes = num_planes;
+
+ bufs[i].frame = &pme->thumbnail_buf.frame[i];
+
+        /* Plane 0 needs to be set separately. Set other planes
+ * in a loop. */
+ bufs[i].planes[0].length = planes[0];
+ bufs[i].planes[0].m.userptr = bufs[i].fd;
+ bufs[i].planes[0].data_offset = y_off;
+ bufs[i].planes[0].reserved[0] = 0;
+ //buf_def->buf.mp[i].frame_offset;
+ for (j = 1; j < num_planes; j++) {
+ bufs[i].planes[j].length = planes[j];
+ bufs[i].planes[j].m.userptr = bufs[i].fd;
+ bufs[i].planes[j].data_offset = cbcr_off;
+ bufs[i].planes[j].reserved[0] =
+ bufs[i].planes[j-1].reserved[0] +
+ bufs[i].planes[j-1].length;
+ }
+ }
+ return MM_CAMERA_OK;
+}
+#endif
+#if 0
+int mm_app_bundle_zsl_stream(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int stream[3];
+ mm_camera_bundle_attr_t attr;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_PREVIEW].id;
+ stream[1] = pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id;
+
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ attr.burst_num = 1;
+ attr.look_back = 2;
+ attr.post_frame_skip = 0;
+ attr.water_mark = 2;
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->init_stream_bundle(
+ pme->cam->camera_handle,pme->ch_id,mm_app_zsl_notify_cb,pme,&attr,2,stream))) {
+ CDBG_ERROR("%s:init_stream_bundle err=%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->start_streams(pme->cam->camera_handle,pme->ch_id,
+ 2, stream))) {
+ CDBG_ERROR("%s:start_streams err=%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+ end:
+ return rc;
+}
+#endif
+
+static void mm_app_live_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+
+ int rc;
+ int i = 0;
+ mm_camera_buf_def_t *main_frame = NULL;
+ mm_camera_app_obj_t *pme = NULL;
+ CDBG("%s: BEGIN\n", __func__);
+
+ pme = (mm_camera_app_obj_t *)user_data;
+
+ CDBG("%s : total streams = %d",__func__,bufs->num_bufs);
+ main_frame = bufs->bufs[0];
+ //thumb_frame = bufs->bufs[1];
+
+ CDBG("mainframe frame_idx = %d fd = %d frame length = %d",main_frame->frame_idx,main_frame->fd,main_frame->frame_len);
+    //CDBG("thumbnail frame_idx = %d fd = %d frame length = %d",thumb_frame->frame_idx,thumb_frame->fd,thumb_frame->frame_len);
+
+ //dumpFrameToFile(main_frame->frame,pme->dim.picture_width,pme->dim.picture_height,"main", 1);
+
+ dumpFrameToFile(main_frame,pme->dim.picture_width,pme->dim.picture_height,"liveshot_main", 1);
+
+ if (MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,main_frame)) {
+ CDBG_ERROR("%s: Failed in thumbnail Qbuf\n", __func__);
+ }
+ /*if(MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,thumb_frame))
+ {
+ CDBG_ERROR("%s: Failed in thumbnail Qbuf\n", __func__);
+ }*/
+
+ mm_app_snapshot_done();
+ CDBG("%s: END\n", __func__);
+
+
+}
+
+int mm_app_prepare_live_snapshot(int cam_id)
+{
+ int rc = 0;
+ int stream[1];
+ mm_camera_bundle_attr_t attr;
+ int value = 0;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ CDBG("%s:BEGIN, cam_id=%d\n",__func__,cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id;
+ //stream[1] = pme->stream[MM_CAMERA_PREVIEW].id; //Need to clarify
+
+ attr.notify_mode = MM_CAMERA_SUPER_BUF_NOTIFY_BURST;
+ attr.burst_num = 1;
+ attr.look_back = 2;
+ attr.post_frame_skip = 0;
+ attr.water_mark = 2;
+
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->set_parm(
+ pme->cam->camera_handle,MM_CAMERA_PARM_FULL_LIVESHOT, (void *)&value))) {
+ CDBG_ERROR("%s: set dimension err=%d\n", __func__, rc);
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->init_stream_bundle(
+ pme->cam->camera_handle,pme->ch_id,mm_app_live_notify_cb,pme,&attr,1,stream))) {
+ CDBG_ERROR("%s:init_stream_bundle err=%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->start_streams(pme->cam->camera_handle,pme->ch_id,
+ 1, stream))) {
+ CDBG_ERROR("%s:start_streams err=%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+
+ end:
+ CDBG("%s:END, cam_id=%d\n",__func__,cam_id);
+ return rc;
+}
+
+int mm_app_unprepare_live_snapshot(int cam_id)
+{
+ int rc = 0;
+ int stream[2];
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+ CDBG("%s:BEGIN, cam_id=%d\n",__func__,cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id;
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->stop_streams(pme->cam->camera_handle,pme->ch_id,1,&stream))) {
+ CDBG_ERROR("%s : Snapshot Stream off Error",__func__);
+ return -1;
+ }
+
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->destroy_stream_bundle(pme->cam->camera_handle,pme->ch_id))) {
+ CDBG_ERROR("%s : Snapshot destroy_stream_bundle Error",__func__);
+ return -1;
+ }
+ CDBG("%s:END, cam_id=%d\n",__func__,cam_id);
+ return rc;
+}
+
+int mm_app_take_live_snapshot(int cam_id)
+{
+ int rc = 0;
+ int stream[3];
+ mm_camera_bundle_attr_t attr;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s:BEGIN, cam_id=%d\n",__func__,cam_id);
+
+ if (pme->cam_mode == RECORDER_MODE &&
+ pme->cam_state == CAMERA_STATE_RECORD) {
+ //Code to get live shot
+ if (mm_app_prepare_live_snapshot(cam_id) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s: Failed prepare liveshot",__func__);
+ return -1;
+ }
+ if (MM_CAMERA_OK != (rc =pme->cam->ops->request_super_buf(pme->cam->camera_handle,pme->ch_id))) {
+ CDBG_ERROR("%s:request_super_buf err=%d\n", __func__, rc);
+ return -1;
+ }
+ CDBG("%s:waiting images\n",__func__);
+ mm_app_snapshot_wait(cam_id);
+
+ if (MM_CAMERA_OK !=mm_app_unprepare_live_snapshot(cam_id)) {
+ CDBG_ERROR("%s: Snapshot Stop error",__func__);
+ }
+
+ } else {
+ CDBG_ERROR("%s: Should not come here for liveshot",__func__);
+ }
+ return rc;
+}
+
+int mm_app_take_picture_zsl(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int value = 1;
+ int op_mode;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s: Take picture ZSL",__func__);
+
+ if (MM_CAMERA_OK != (rc =pme->cam->ops->request_super_buf(pme->cam->camera_handle,pme->ch_id))) {
+ CDBG_ERROR("%s:request_super_buf err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ CDBG("%s: Start ZSL Preview",__func__);
+
+ end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_take_picture_yuv(int cam_id)
+{
+ int rc;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s:BEGIN, cam_id=%d\n",__func__,cam_id);
+
+ if (MM_CAMERA_OK != mm_app_start_snapshot(cam_id)) {
+ CDBG_ERROR("%s: cam_id=%d\n",__func__,cam_id);
+ rc = -1;
+ goto end;
+ }
+
+ CDBG("%s:waiting images\n",__func__);
+ mm_app_snapshot_wait(cam_id);
+
+ if (MM_CAMERA_OK !=mm_app_stop_snapshot(cam_id)) {
+ CDBG_ERROR("%s: Snapshot Stop error",__func__);
+ }
+
+ preview:
+ if (MM_CAMERA_OK != (rc = mm_app_start_preview(cam_id))) {
+ CDBG("%s:preview start stream err=%d\n", __func__, rc);
+ }
+ end:
+ CDBG("%s:END, cam_id=%d\n",__func__,cam_id);
+ return rc;
+}
+
+int mm_app_take_zsl(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s:BEGIN, cam_id=%d\n",__func__,cam_id);
+
+ if (pme->cam_mode == RECORDER_MODE) {
+ switch (pme->cam_state) {
+ case CAMERA_STATE_RECORD:
+ if (MM_CAMERA_OK != mm_app_stop_video(cam_id)) {
+ CDBG_ERROR("%s:Cannot stop video err=%d\n", __func__, rc);
+ return -1;
+ }
+ case CAMERA_STATE_PREVIEW:
+ if (MM_CAMERA_OK != mm_app_open_zsl(cam_id)) {
+ CDBG_ERROR("%s: Cannot switch to camera mode=%d\n", __func__);
+ return -1;
+ }
+ break;
+ case CAMERA_STATE_SNAPSHOT:
+ default:
+ CDBG("%s: Cannot normal pciture in record mode\n", __func__);
+ break;
+ }
+ } else if (pme->cam_mode == CAMERA_MODE) {
+ switch (pme->cam_state) {
+ case CAMERA_STATE_PREVIEW:
+ mm_app_open_zsl(cam_id);
+ break;
+
+ case CAMERA_STATE_SNAPSHOT:
+ case CAMERA_STATE_RECORD:
+ default:
+ CDBG("%s: Cannot normal pciture in record mode\n", __func__);
+ break;
+ }
+ }
+
+ if (pme->cam_mode == ZSL_MODE && pme->cam_state == CAMERA_STATE_PREVIEW) {
+ mm_app_take_picture_zsl(cam_id);
+ }
+ return rc;
+}
+
+int mm_app_take_picture(int cam_id)
+{
+ int rc = 0;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s:BEGIN, cam_id=%d\n",__func__,cam_id);
+
+ if (pme->cam_mode == RECORDER_MODE) {
+ switch (pme->cam_state) {
+ case CAMERA_STATE_RECORD:
+ if (MM_CAMERA_OK != mm_app_stop_video(cam_id)) {
+ CDBG_ERROR("%s:Cannot stop video err=%d\n", __func__, rc);
+ return -1;
+ }
+ case CAMERA_STATE_PREVIEW:
+ if (MM_CAMERA_OK != mm_app_open_camera(cam_id)) {
+ CDBG_ERROR("%s: Cannot switch to camera mode=%d\n", __func__,rc);
+ return -1;
+ }
+ break;
+ case CAMERA_STATE_SNAPSHOT:
+ default:
+ CDBG("%s: Cannot normal pciture in record mode\n", __func__);
+ break;
+ }
+ } else if (pme->cam_mode == ZSL_MODE) {
+ switch (pme->cam_state) {
+ case CAMERA_STATE_PREVIEW:
+ mm_app_open_camera(cam_id);
+ break;
+
+ case CAMERA_STATE_SNAPSHOT:
+ case CAMERA_STATE_RECORD:
+ default:
+ CDBG("%s: Cannot normal pciture in record mode\n", __func__);
+ break;
+ }
+ }
+
+ CDBG("%s : Takepicture : mode = %d state = %d, rc = %d",__func__,pme->cam_mode,pme->cam_state,rc);
+ if (pme->cam_mode == CAMERA_MODE && pme->cam_state == CAMERA_STATE_PREVIEW) {
+ mm_app_take_picture_yuv(cam_id);
+ }
+ return rc;
+}
+
+int mm_app_take_raw_picture(int cam_id)
+{
+ int rc;
+#if 0
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s:BEGIN, cam_id=%d\n",__func__,cam_id);
+ g_status = FALSE;
+ if (MM_CAMERA_OK != (rc = pme->cam->ops->action(pme->cam, TRUE, MM_CAMERA_OPS_PREPARE_SNAPSHOT, 0))) {
+ CDBG("%s:prepare snapshot err=%d\n", __func__, rc);
+ goto end;
+ }
+ if (MM_CAMERA_OK != (rc = mm_app_stop_preview(cam_id))) {
+ CDBG("%s:mm_app_stop_preview err=%d\n", __func__, rc);
+ goto end;
+ }
+ if (MM_CAMERA_OK != mm_app_start_raw_snapshot(cam_id))
+ goto preview;
+ CDBG("%s:waiting images\n",__func__);
+ mm_app_snapshot_wait(cam_id);
+ CDBG("%s:calling mm_app_stop_snapshot() \n",__func__);
+ mm_app_stop_raw_snapshot(cam_id);
+ preview:
+ mm_app_start_preview(cam_id);
+ end:
+ CDBG("%s:END, cam_id=%d\n",__func__,cam_id);
+#endif
+ return rc;
+}
+
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_unit_test.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_unit_test.c
new file mode 100644
index 0000000..ad4030f
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_unit_test.c
@@ -0,0 +1,559 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_qcamera_unit_test.h"
+
+#define MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP 4
+#define MM_QCAM_APP_TEST_NUM 128
+
+#define MM_QCAMERA_APP_INTERATION 5
+
+static mm_app_tc_t mm_app_tc[MM_QCAM_APP_TEST_NUM];
+static int num_test_cases = 0;
+
+int mm_app_tc_0(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying Preview on front and back camera...\n");
+ if (cam_apps->num_cameras == 0) {
+ CDBG_ERROR("%s:Query Failed: Num of cameras = %d\n",__func__, cam_apps->num_cameras);
+ rc = -1;
+ goto end;
+ }
+ for (i = 0; i < cam_apps->num_cameras; i++) {
+ if ( mm_app_open(i) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if (system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if ( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ CDBG_ERROR("%s: Preview Callback not received in time or qbuf failed\n", __func__);
+ break;
+ }*/
+ mm_camera_app_wait();
+ if ( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ CDBG("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ result++;
+ }
+ if ( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if (result != MM_QCAMERA_APP_INTERATION) {
+ printf("%s: Preview Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+ end:
+ if (rc == 0) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ CDBG("%s:END, rc = %d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_tc_1(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying Snapshot on front and back camera...\n");
+ for (i = 0; i < cam_apps->num_cameras; i++) {
+ if ( mm_app_open(i) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if (system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if ( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if ( MM_CAMERA_OK != (rc = takePicture_yuv(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: TakePicture() err=%d\n", __func__, rc);
+ break;
+ }
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+                CDBG_ERROR("%s: Snapshot/Preview Callback not received in time or qbuf Failed\n", __func__);
+ break;
+ }*/
+ mm_camera_app_wait();
+ result++;
+ }
+ if ( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ CDBG("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ if ( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if (result != MM_QCAMERA_APP_INTERATION) {
+ printf("%s: Snapshot Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+ end:
+ if (rc == 0) {
+ printf("\t***Passed***\n");
+ } else {
+ printf("\t***Failed***\n");
+ }
+ CDBG("%s:END, rc = %d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_tc_2(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying Video on front and back camera...\n");
+ for (i = 0; i < cam_apps->num_cameras; i++) {
+ if ( mm_app_open(i) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if (system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ if ( MM_CAMERA_OK != (rc = startPreview(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if ( MM_CAMERA_OK != (rc = startRecording(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: StartVideorecording() err=%d\n", __func__, rc);
+ break;
+ }
+
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ CDBG_ERROR("%s: Video Callback not received in time\n", __func__);
+ break;
+ }*/
+ mm_camera_app_wait();
+ if ( MM_CAMERA_OK != (rc = stopRecording(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: Stopvideorecording() err=%d\n", __func__, rc);
+ break;
+ }
+ result++;
+ }
+ if ( MM_CAMERA_OK != (rc = stopPreview(my_cam_app.cam_open))) {
+ CDBG("%s: startPreview() err=%d\n", __func__, rc);
+ break;
+ }
+ if ( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if (result != MM_QCAMERA_APP_INTERATION) {
+ printf("%s: Video Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+ end:
+ if (rc == 0) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ CDBG("%s:END, rc = %d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_tc_3(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i,j;
+ int result = 0;
+
+ printf("\n Verifying RDI Stream on front and back camera...\n");
+ if (cam_apps->num_cameras == 0) {
+ CDBG_ERROR("%s:Query Failed: Num of cameras = %d\n",__func__, cam_apps->num_cameras);
+ rc = -1;
+ goto end;
+ }
+ for (i = 0; i < cam_apps->num_cameras; i++) {
+ if ( mm_app_open(i) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_open() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if (system_dimension_set(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:system_dimension_set() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ for (j = 0; j < MM_QCAMERA_APP_INTERATION; j++) {
+ if ( MM_CAMERA_OK != (rc = startRdi(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: StartVideorecording() err=%d\n", __func__, rc);
+ break;
+ }
+
+ /*if(mm_camera_app_timedwait() == ETIMEDOUT) {
+ CDBG_ERROR("%s: Video Callback not received in time\n", __func__);
+ break;
+ }*/
+ mm_camera_app_wait();
+ if ( MM_CAMERA_OK != (rc = stopRdi(my_cam_app.cam_open))) {
+ CDBG_ERROR("%s: Stopvideorecording() err=%d\n", __func__, rc);
+ break;
+ }
+ result++;
+ }
+ if ( mm_app_close(my_cam_app.cam_open) != MM_CAMERA_OK) {
+ CDBG_ERROR("%s:mm_app_close() err=%d\n",__func__, rc);
+ rc = -1;
+ goto end;
+ }
+ if (result != MM_QCAMERA_APP_INTERATION) {
+ printf("%s: Video Start/Stop Fails for Camera %d in %d iteration", __func__, i,j);
+ rc = -1;
+ break;
+ }
+
+ result = 0;
+ }
+ end:
+ if (rc == 0) {
+ printf("\nPassed\n");
+ } else {
+ printf("\nFailed\n");
+ }
+ CDBG("%s:END, rc = %d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_tc_4(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - open/close ,video0, open/close preview channel only\n", __func__);
+#if 0
+ for (i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n", __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+ if (0 != (rc = mm_app_open_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_close_preview(cam_id))) {
+ goto end;
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n", __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+
+
+int mm_app_tc_5(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - open/close ,video0, open/close snapshot channel only\n", __func__);
+#if 0
+ for (i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+ if (0 != (rc = mm_app_open_snapshot(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_close_snapshot(cam_id))) {
+ goto end;
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+ }
+ end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_tc_6(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - simple preview \n", __func__);
+#if 0
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+
+ for (i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if (0 != (rc = mm_app_init_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_start_preview(cam_id))) {
+ goto end;
+ }
+ /* sleep 8 seconds */
+ usleep(8000000);
+ if (0 != (rc = mm_app_stop_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc=mm_app_deinit_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_close_preview(cam_id))) {
+ goto end;
+ }
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_tc_7(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - simple preview and recording \n", __func__);
+#if 0
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+
+ for (i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if (0 != (rc = mm_app_init_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_start_preview(cam_id))) {
+ goto end;
+ }
+ /* sleep 8 seconds */
+ usleep(8000000);
+ if (0 != (rc = mm_app_start_recording(cam_id))) {
+ goto end;
+ }
+ usleep(1000000);
+ if (0 != (rc = mm_app_stop_recording(cam_id))) {
+ goto end;
+ }
+ usleep(8000000);
+ if (0 != (rc = mm_app_stop_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc=mm_app_deinit_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_close_preview(cam_id))) {
+ goto end;
+ }
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int mm_app_tc_8(mm_camera_app_t *cam_apps)
+{
+ int rc = MM_CAMERA_OK;
+ int i;
+ printf("Running %s - preview, recording, and snapshot, then preview again \n", __func__);
+#if 0
+ if ( 0 != (rc = mm_app_open(cam_id, MM_CAMERA_OP_MODE_NOTUSED))) {
+ CDBG("%s: open cam %d at opmode = %d err, loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+
+ for (i = 0; i < MM_QCAMERA_APP_UTEST_MAX_MAIN_LOOP; i++) {
+ if (0 != (rc = mm_app_init_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_start_preview(cam_id))) {
+ goto end;
+ }
+ /* sleep 8 seconds */
+ usleep(8000000);
+ if (0 != (rc = mm_app_start_recording(cam_id))) {
+ goto end;
+ }
+ usleep(1000000);
+ if (0 != (rc = mm_app_stop_recording(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_stop_preview(cam_id))) {
+ goto end;
+ }
+ if (0!=(rc=mm_app_init_snapshot(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc=mm_app_take_picture(cam_id))) {
+ goto end;
+ }
+ if ( 0 != (rc = mm_app_deinit_snahspot(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_start_preview(cam_id))) {
+ goto end;
+ }
+ usleep(8000000);
+ if (0 != (rc=mm_app_deinit_preview(cam_id))) {
+ goto end;
+ }
+ if (0 != (rc = mm_app_close_preview(cam_id))) {
+ goto end;
+ }
+ }
+ if ( 0 != (rc = mm_app_close(cam_id))) {
+ CDBG("%s: close cam %d at opmode = %d err,loop=%d, rc=%d\n",
+ __func__, cam_id, MM_CAMERA_OP_MODE_NOTUSED, i, rc);
+ goto end;
+ }
+end:
+#endif
+ CDBG("%s:END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+
+int mm_app_gen_test_cases()
+{
+ int tc = 0;
+ memset(mm_app_tc, 0, sizeof(mm_app_tc));
+ if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_0;
+ if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_1;
+ if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_2;
+ if (tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_3;
+ /*if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_4;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_5;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_6;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_7;
+ if(tc < MM_QCAM_APP_TEST_NUM) mm_app_tc[tc++].f = mm_app_tc_8;*/
+ return tc;
+}
+
+int mm_app_unit_test_entry(mm_camera_app_t *cam_app)
+{
+ int rc = MM_CAMERA_OK;
+ int i, tc = 0;
+ int cam_id = 0;
+
+ tc = mm_app_gen_test_cases();
+ CDBG("Running %d test cases\n",tc);
+ for (i = 0; i < tc; i++) {
+ mm_app_tc[i].r = mm_app_tc[i].f(cam_app);
+ if (mm_app_tc[i].r != MM_CAMERA_OK) {
+ printf("%s: test case %d error = %d, abort unit testing engine!!!!\n",
+ __func__, i, mm_app_tc[i].r);
+ rc = mm_app_tc[i].r;
+ goto end;
+ }
+ }
+ end:
+ printf("nTOTAL_TSET_CASE = %d, NUM_TEST_RAN = %d, rc=%d\n", tc, i, rc);
+ return rc;
+}
+
+
+
+
diff --git a/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_video.c b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_video.c
new file mode 100644
index 0000000..ddc7a33
--- /dev/null
+++ b/camera/QCamera/stack/mm-camera-test/src/mm_qcamera_video.c
@@ -0,0 +1,484 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_qcamera_app.h"
+
+#define BUFF_SIZE_128 128
+static int num_run = 0;
+
+static int mm_app_dump_video_frame(struct msm_frame *frame,
+ uint32_t len)
+{
+ static int v_cnt = 0;
+ char bufp[BUFF_SIZE_128];
+ int file_fdp;
+ int rc = 0;
+
+ return rc; /* disable dump */
+
+ v_cnt++;
+ if(0 == (v_cnt % 10))
+ snprintf(bufp, BUFF_SIZE_128, "/data/v_%d.yuv", v_cnt);
+ else
+ return 0;
+
+ file_fdp = open(bufp, O_RDWR | O_CREAT, 0777);
+
+ if (file_fdp < 0) {
+ CDBG("cannot open file %s\n", bufp);
+ rc = -1;
+ goto end;
+ }
+ CDBG("%s:dump frame to '%s'\n", __func__, bufp);
+ write(file_fdp,
+ (const void *)frame->buffer, len);
+ close(file_fdp);
+end:
+ return rc;
+}
+
+static int mm_app_set_video_fmt(int cam_id,mm_camera_image_fmt_t *fmt)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ fmt->meta_header = MM_CAMEAR_META_DATA_TYPE_DEF;
+ fmt->fmt = pme->dim.enc_format;
+ fmt->width = pme->dim.video_width;
+ fmt->height = pme->dim.video_height;
+ return rc;
+}
+
+#if 0
+int mm_stream_deinit_video_buf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data, uint8_t num_bufs,
+ mm_camera_buf_def_t *bufs)
+{
+ int i, rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ for(i = 0; i < num_bufs; i++) {
+ rc = my_cam_app.hal_lib.mm_camera_do_munmap_ion (pme->ionfd, &(pme->video_buf.frame[i].fd_data),
+ (void *)pme->video_buf.frame[i].buffer, pme->video_buf.frame_len);
+ if(rc != MM_CAMERA_OK) {
+ CDBG_ERROR("%s: mm_camera_do_munmap err, pmem_fd = %d, rc = %d",
+ __func__, bufs[i].fd, rc);
+ }
+ }
+ return rc;
+}
+
+int mm_stream_init_video_buf(uint32_t camera_handle,
+ uint32_t ch_id, uint32_t stream_id,
+ void *user_data,
+ mm_camera_frame_len_offset *frame_offset_info,
+ uint8_t num_bufs,
+ uint8_t *initial_reg_flag,
+ mm_camera_buf_def_t *bufs)
+{
+ int i,j,num_planes, frame_len, y_off, cbcr_off;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ uint32_t pmem_addr = 0;
+
+ mm_camera_app_obj_t *pme = (mm_camera_app_obj_t *)user_data;
+
+ num_planes = frame_offset_info->num_planes;
+ for ( i = 0; i < num_planes; i++) {
+ planes[i] = frame_offset_info->mp[i].len;
+ }
+
+ frame_len = frame_offset_info->frame_len;
+ y_off = frame_offset_info->mp[0].offset;
+ cbcr_off = frame_offset_info->mp[1].offset;
+
+ CDBG("Allocating video Memory for %d buffers frame_len = %d",num_bufs,frame_offset_info->frame_len);
+
+ for (i = 0; i < num_bufs ; i++) {
+ int j;
+ pme->video_buf.reg[i] = 1;
+ initial_reg_flag[i] = 1;
+
+ pme->video_buf.frame_len = frame_len;
+ pme->video_buf.frame[i].ion_alloc.len = pme->video_buf.frame_len;
+ pme->video_buf.frame[i].ion_alloc.flags =
+ (0x1 << CAMERA_ION_HEAP_ID | 0x1 << ION_IOMMU_HEAP_ID);
+ pme->video_buf.frame[i].ion_alloc.align = 4096;
+
+ pmem_addr = (unsigned long) my_cam_app.hal_lib.mm_camera_do_mmap_ion(pme->ionfd,
+ &(pme->video_buf.frame[i].ion_alloc), &(pme->video_buf.frame[i].fd_data),
+ &pme->video_buf.frame[i].fd);
+
+ pme->video_buf.frame[i].buffer = pmem_addr;
+ pme->video_buf.frame[i].path = OUTPUT_TYPE_V;
+ pme->video_buf.frame[i].y_off = 0;
+ pme->video_buf.frame[i].cbcr_off = planes[0];
+ pme->video_buf.frame[i].phy_offset = 0;
+
+ CDBG("Buffer allocated Successfully fd = %d",pme->video_buf.frame[i].fd);
+
+ bufs[i].fd = pme->video_buf.frame[i].fd;
+ //bufs[i].buffer = pmem_addr;
+ bufs[i].frame_len = pme->video_buf.frame[i].ion_alloc.len;
+ bufs[i].num_planes = num_planes;
+
+ bufs[i].frame = &pme->video_buf.frame[i];
+
+ /* Plane 0 needs to be set seperately. Set other planes
+ * in a loop. */
+ bufs[i].planes[0].length = planes[0];
+ bufs[i].planes[0].m.userptr = bufs[i].fd;
+ bufs[i].planes[0].data_offset = y_off;
+ bufs[i].planes[0].reserved[0] = 0;
+ //buf_def->buf.mp[i].frame_offset;
+ for (j = 1; j < num_planes; j++) {
+ bufs[i].planes[j].length = planes[j];
+ bufs[i].planes[j].m.userptr = bufs[i].fd;
+ bufs[i].planes[j].data_offset = cbcr_off;
+ bufs[i].planes[j].reserved[0] =
+ bufs[i].planes[j-1].reserved[0] +
+ bufs[i].planes[j-1].length;
+ }
+ }
+ return MM_CAMERA_OK;
+}
+#endif
+
+void video_cb_signal(mm_camera_app_obj_t *pme)
+{
+ if(pme->cam_state == CAMERA_STATE_RECORD) {
+ mm_camera_app_done();
+ }
+}
+
+static void mm_app_video_notify_cb(mm_camera_super_buf_t *bufs,
+ void *user_data)
+{
+ int rc;
+ mm_camera_buf_def_t *frame = NULL;
+ mm_camera_app_obj_t *pme = NULL;
+ CDBG("%s: BEGIN\n", __func__);
+ frame = bufs->bufs[MM_CAMERA_PREVIEW] ; /* NOTE(review): video cb indexes the super-buf with MM_CAMERA_PREVIEW, not MM_CAMERA_VIDEO -- correct only if the two indices coincide; confirm */
+ pme = (mm_camera_app_obj_t *)user_data;
+
+ CDBG("%s: BEGIN - length=%d, frame idx = %d\n", __func__, frame->frame_len, frame->frame_idx);
+ //Need to code to Send to Encoder .. Simulat
+ CDBG("In CB function i/p = %p o/p = %p",bufs->bufs[MM_CAMERA_PREVIEW],frame);
+
+ dumpFrameToFile(frame,pme->dim.orig_video_width,pme->dim.orig_video_height,"video", 1);
+ if(MM_CAMERA_OK != pme->cam->ops->qbuf(pme->cam->camera_handle,pme->ch_id,frame))
+ {
+ CDBG_ERROR("%s: Failed in Snapshot Qbuf\n", __func__);
+ return;
+ }
+ video_cb_signal(pme);
+ CDBG("%s: END\n", __func__);
+
+}
+
+int mm_app_config_video(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ mm_app_set_video_fmt(cam_id,&pme->stream[MM_CAMERA_VIDEO].str_config.fmt);
+ pme->stream[MM_CAMERA_VIDEO].str_config.need_stream_on = 1;
+ pme->stream[MM_CAMERA_VIDEO].str_config.num_of_bufs = VIDEO_BUF_NUM;
+
+ if(MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_VIDEO].id,
+ &pme->stream[MM_CAMERA_VIDEO].str_config))) {
+ CDBG_ERROR("%s:MM_CAMERA_VIDEO config streaming err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ CDBG("config_stream stream is successfull");
+
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.need_stream_on = pme->fullSizeSnapshot;
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.num_of_bufs = 1;
+
+ mm_app_set_live_snapshot_fmt(cam_id,&pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config.fmt);
+
+ if(MM_CAMERA_OK != (rc = pme->cam->ops->config_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id,
+ &pme->stream[MM_CAMERA_SNAPSHOT_MAIN].str_config))) {
+ CDBG_ERROR("%s:preview streaming err=%d\n", __func__, rc);
+ goto end;
+ }
+end:
+ return rc;
+
+}
+
+int mm_app_prepare_video(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ pme->stream[MM_CAMERA_VIDEO].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
+ mm_app_video_notify_cb,pme,
+ MM_CAMERA_VIDEO, 0);
+
+ if(!pme->stream[MM_CAMERA_VIDEO].id) {
+ CDBG("%s:MM_CAMERA_VIDEO streaming err = %d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+
+ CDBG("Add stream is successfull stream ID = %d",pme->stream[MM_CAMERA_PREVIEW].id);
+
+ /* Code to add live snapshot*/
+ pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id = pme->cam->ops->add_stream(pme->cam->camera_handle,pme->ch_id,
+ NULL,pme,
+ MM_CAMERA_SNAPSHOT_MAIN, 0);
+
+ CDBG("Add Snapshot main is successfull stream ID = %d",pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id);
+ if(!pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id) {
+ CDBG_ERROR("%s:preview streaming err=%d\n", __func__, rc);
+ rc = -1;
+ goto end;
+ }
+ /*if(mm_app_config_video(cam_id) != MM_CAMERA_OK)
+ {
+ CDBG_ERROR("%s:Video config err=%d\n", __func__, rc);
+ }*/
+end:
+ return rc;
+}
+
+int mm_app_unprepare_video(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if(MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_VIDEO].id))){
+ CDBG_ERROR("%s : Delete Stream Video error",__func__);
+ goto end;
+ }
+
+ if(MM_CAMERA_OK != (rc = pme->cam->ops->del_stream(pme->cam->camera_handle,pme->ch_id,pme->stream[MM_CAMERA_SNAPSHOT_MAIN].id))){
+ CDBG_ERROR("%s : Delete Stream Video error",__func__);
+ goto end;
+ }
+end:
+ CDBG("del_stream successfull");
+ return rc;
+}
+
+static int mm_app_streamon_video(int cam_id)
+{
+ int stream[2];
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if(mm_app_config_video(cam_id) != MM_CAMERA_OK)
+ {
+ CDBG_ERROR("%s:Video config err=%d\n", __func__, rc);
+ }
+
+ stream[MM_CAMERA_VIDEO] = pme->stream[MM_CAMERA_VIDEO].id;
+ if(MM_CAMERA_OK != (rc = pme->cam->ops->start_streams(pme->cam->camera_handle,pme->ch_id,1,&stream[1])))
+ {
+ CDBG_ERROR("%s : Start Stream video Error",__func__);
+ return -1;
+ }
+ CDBG("Start video stream is successfull");
+ pme->cam_state = CAMERA_STATE_RECORD;
+ return rc;
+}
+
+static int mm_app_streamoff_video(int cam_id)
+{
+ int stream[2];
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_VIDEO].id;
+
+ if(MM_CAMERA_OK != (rc =pme->cam->ops->stop_streams(pme->cam->camera_handle,pme->ch_id,1,&stream)))
+ {
+ CDBG_ERROR("%s : stop Stream video Error",__func__);
+ goto end;
+ }
+ CDBG("stop video stream is successfull");
+ pme->cam_state = CAMERA_STATE_PREVIEW;
+end:
+ return rc;
+
+}
+int mm_app_stop_video(int cam_id)
+{
+ int stream[2];
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ stream[0] = pme->stream[MM_CAMERA_VIDEO].id;
+
+ if(MM_CAMERA_OK != (rc = mm_app_streamoff_video(cam_id))){
+ CDBG_ERROR("%s : Video Stream off error",__func__);
+ goto end;
+ }
+end:
+ return rc;
+}
+
+static int mm_app_start_video(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("pme = %p, pme->cam =%p, pme->ch = %d pme->cam->camera_handle = %d",
+ pme,pme->cam,pme->ch_id,pme->cam->camera_handle);
+
+ if(MM_CAMERA_OK != (rc = mm_app_prepare_video(cam_id))){
+ CDBG_ERROR("%s:MM_CAMERA_VIDEO streaming err = %d\n", __func__, rc);
+ goto end;
+ }
+ if(MM_CAMERA_OK != (rc = mm_app_streamon_video(cam_id))){
+ CDBG_ERROR("%s:MM_CAMERA_VIDEO streaming err = %d\n", __func__, rc);
+ goto end;
+ }
+
+end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+
+ return rc;
+}
+
+int mm_app_open_recorder(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+ int value = 1;
+ int powermode = 1;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s: mm_app_open_recorder",__func__);
+ if(pme->cam_mode == RECORDER_MODE) {
+ CDBG("%s : Already in record mode",__func__);
+ return rc;
+ }
+
+ if(MM_CAMERA_OK != (rc = mm_app_stop_preview(cam_id))){
+ CDBG_ERROR("%s:Stop preview err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if(MM_CAMERA_OK != initDisplay())
+ {
+ CDBG_ERROR("%s : Could not initalize display",__func__);
+ goto end;
+ }
+
+ pme->cam->ops->set_parm(pme->cam->camera_handle,MM_CAMERA_PARM_RECORDING_HINT, &value);
+
+ pme->cam->ops->set_parm(pme->cam->camera_handle,MM_CAMERA_PARM_LOW_POWER_MODE, &powermode);
+
+
+ if(MM_CAMERA_OK != (rc = mm_app_prepare_preview(cam_id))){
+ CDBG_ERROR("%s:stream on preview err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if(MM_CAMERA_OK != (rc = mm_app_prepare_video(cam_id))){
+ CDBG_ERROR("%s:stream on video err=%d\n", __func__, rc);
+ goto end;
+ }
+
+ if(MM_CAMERA_OK != (rc = mm_app_streamon_preview(cam_id))){
+ CDBG_ERROR("%s:start preview err=%d\n", __func__, rc);
+ goto end;
+ }
+ pme->cam_mode = RECORDER_MODE;
+end:
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int startRecording(int cam_id)
+{
+ int rc = MM_CAMERA_OK;
+
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ CDBG("%s: Start Recording mode = %d state = %d",__func__,pme->cam_mode,pme->cam_state);
+
+ if(pme->cam_mode == CAMERA_MODE || pme->cam_mode == ZSL_MODE) {
+ switch(pme->cam_state) {
+ case CAMERA_STATE_PREVIEW:
+ if(MM_CAMERA_OK != mm_app_open_recorder(cam_id)){
+ CDBG_ERROR("%s: Open Record Failed \n", __func__);
+ return -1;
+ }
+ break;
+ case CAMERA_STATE_RECORD:
+ case CAMERA_STATE_SNAPSHOT:
+ default:
+ break;
+ }
+ }/*else{
+ mm_app_prepare_video(cam_id);
+ }*/
+ CDBG("%s : startRecording : mode = %d state = %d",__func__,pme->cam_mode,pme->cam_state);
+ if(pme->cam_mode == RECORDER_MODE && pme->cam_state == CAMERA_STATE_PREVIEW){
+ if(MM_CAMERA_OK != mm_app_streamon_video(cam_id)){
+ CDBG_ERROR("%s:start video err=%d\n", __func__, rc);
+ return -1;
+ }
+ }
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+int stopRecording(int cam_id)
+{
+
+ int rc = MM_CAMERA_OK;
+ mm_camera_app_obj_t *pme = mm_app_get_cam_obj(cam_id);
+
+ if(pme->cam_mode != RECORDER_MODE || pme->cam_state != CAMERA_STATE_RECORD) {
+ return rc;
+ }
+ if(MM_CAMERA_OK != mm_app_stop_video(cam_id)){
+ CDBG_ERROR("%s:stop video err=%d\n", __func__, rc);
+ return -1;
+ }
+ return rc;
+}
+
diff --git a/camera/QCamera/stack/mm-jpeg-interface/Android.mk b/camera/QCamera/stack/mm-jpeg-interface/Android.mk
new file mode 100644
index 0000000..9d6fc85
--- /dev/null
+++ b/camera/QCamera/stack/mm-jpeg-interface/Android.mk
@@ -0,0 +1,31 @@
+OLD_LOCAL_PATH := $(LOCAL_PATH)
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+
+LOCAL_CFLAGS+= -D_ANDROID_
+LOCAL_COPY_HEADERS_TO := mm-camera-interface
+LOCAL_COPY_HEADERS = inc/mm_jpeg_interface.h
+
+LOCAL_C_INCLUDES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include
+LOCAL_C_INCLUDES += $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr/include/media
+LOCAL_ADDITIONAL_DEPENDENCIES := $(TARGET_OUT_INTERMEDIATES)/KERNEL_OBJ/usr
+
+LOCAL_C_INCLUDES += \
+ $(LOCAL_PATH)/inc \
+ $(LOCAL_PATH)/../common \
+ $(LOCAL_PATH)/../../../ \
+ $(LOCAL_PATH)/../../../inc \
+
+LOCAL_SRC_FILES := \
+ src/mm_jpeg_queue.c \
+ src/mm_jpeg.c \
+ src/mm_jpeg_interface.c
+
+LOCAL_MODULE := libmmjpeg_interface
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
+
+LOCAL_PATH := $(OLD_LOCAL_PATH)
diff --git a/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg.h b/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg.h
new file mode 100644
index 0000000..cd0f35c
--- /dev/null
+++ b/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg.h
@@ -0,0 +1,166 @@
+
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef MM_JPEG_H_
+#define MM_JPEG_H_
+
+#include "mm_jpeg_interface.h"
+#include "cam_list.h"
+#include "OMX_Types.h"
+#include "OMX_Index.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+//#include "omx_jpeg_ext.h"
+#include <semaphore.h>
+
+typedef struct {
+ struct cam_list list;
+ void* data;
+} mm_jpeg_q_node_t;
+
+typedef struct {
+ mm_jpeg_q_node_t head; /* dummy head */
+ uint32_t size;
+ pthread_mutex_t lock;
+} mm_jpeg_queue_t;
+
+typedef enum
+{
+ MM_JPEG_CMD_TYPE_JOB, /* job cmd */
+ MM_JPEG_CMD_TYPE_EXIT, /* EXIT cmd for exiting jobMgr thread */
+ MM_JPEG_CMD_TYPE_MAX
+} mm_jpeg_cmd_type_t;
+
+typedef struct {
+ OMX_BUFFERHEADERTYPE* buf_header;
+ uint32_t portIdx;
+} mm_jpeg_omx_buf_info;
+
+typedef struct {
+ uint8_t num_bufs;
+ mm_jpeg_omx_buf_info bufs[MAX_SRC_BUF_NUM];
+} mm_jpeg_omx_src_buf;
+
+typedef struct {
+ uint32_t client_hdl; /* client handler */
+ uint32_t jobId; /* job ID */
+ mm_jpeg_job job; /* job description */
+ pthread_t cb_pid; /* cb thread heandler*/
+
+ void* jpeg_obj; /* ptr to mm_jpeg_obj */
+ jpeg_job_status_t job_status; /* job status */
+ uint8_t thumbnail_dropped; /* flag indicating if thumbnail is dropped */
+ int32_t jpeg_size; /* the size of jpeg output after job is done */
+
+ mm_jpeg_omx_src_buf src_bufs[JPEG_SRC_IMAGE_TYPE_MAX];
+ mm_jpeg_omx_buf_info sink_buf;
+} mm_jpeg_job_entry;
+
+typedef struct {
+ mm_jpeg_cmd_type_t type;
+ union {
+ mm_jpeg_job_entry entry;
+ };
+} mm_jpeg_job_q_node_t;
+
+typedef struct {
+ uint8_t is_used; /* flag: if is a valid client */
+ uint32_t client_handle; /* client handle */
+} mm_jpeg_client_t;
+
+typedef struct {
+ pthread_t pid; /* job cmd thread ID */
+ sem_t job_sem; /* semaphore for job cmd thread */
+ mm_jpeg_queue_t job_queue; /* queue for job to do */
+} mm_jpeg_job_cmd_thread_t;
+
+typedef enum {
+ MM_JPEG_EVENT_MASK_JPEG_DONE = 0x00000001, /* jpeg job is done */
+ MM_JPEG_EVENT_MASK_JPEG_ABORT = 0x00000002, /* jpeg job is aborted */
+ MM_JPEG_EVENT_MASK_JPEG_ERROR = 0x00000004, /* jpeg job has error */
+ MM_JPEG_EVENT_MASK_CMD_COMPLETE = 0x00000100 /* omx cmd complete evt */
+} mm_jpeg_event_mask_t;
+
+typedef struct {
+ uint32_t evt;
+ int omx_value1; /* only valid when evt_mask == MM_JPEG_EVENT_MASK_CMD_COMPLETE */
+ int omx_value2; /* only valid when evt_mask == MM_JPEG_EVENT_MASK_CMD_COMPLETE */
+} mm_jpeg_evt_t;
+
+#define MAX_JPEG_CLIENT_NUM 8
+typedef struct mm_jpeg_obj_t {
+ /* ClientMgr */
+ int num_clients; /* num of clients */
+ mm_jpeg_client_t clnt_mgr[MAX_JPEG_CLIENT_NUM]; /* client manager */
+
+ /* JobMkr */
+ pthread_mutex_t job_lock; /* job lock */
+ mm_jpeg_job_cmd_thread_t job_mgr; /* job mgr thread including todo_q*/
+ mm_jpeg_queue_t ongoing_job_q; /* queue for ongoing jobs */
+
+ /* Notifier */
+ mm_jpeg_queue_t cb_q; /* queue for CB threads */
+
+ /* OMX related */
+ OMX_HANDLETYPE omx_handle; /* handle to omx engine */
+
+ pthread_mutex_t omx_evt_lock;
+ pthread_cond_t omx_evt_cond;
+ mm_jpeg_evt_t omx_evt_rcvd;
+} mm_jpeg_obj;
+
+extern int32_t mm_jpeg_init(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj);
+extern uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj);
+extern int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+ uint32_t client_hdl,
+ mm_jpeg_job* job,
+ uint32_t* jobId);
+extern int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+ uint32_t client_hdl,
+ uint32_t jobId);
+extern int32_t mm_jpeg_close(mm_jpeg_obj *my_obj,
+ uint32_t client_hdl);
+
+/* utility function declared in mm-camera-interface2.c
+ * and needs to be used by mm-camera and below*/
+uint32_t mm_jpeg_util_generate_handler(uint8_t index);
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler);
+
+/* basic queue functions */
+extern int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue, void* node);
+extern void* mm_jpeg_queue_deq(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue);
+extern int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue);
+extern uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue);
+
+#endif /* MM_JPEG_H_ */
+
+
diff --git a/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h b/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
new file mode 100644
index 0000000..971d1b8
--- /dev/null
+++ b/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg_dbg.h
@@ -0,0 +1,70 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_JPEG_DBG_H__
+#define __MM_JPEG_DBG_H__
+
+#define LOG_DEBUG 1
+
+#ifndef LOG_DEBUG
+ #ifdef _ANDROID_
+ #undef LOG_NIDEBUG
+ #undef LOG_TAG
+ #define LOG_NIDEBUG 0
+ #define LOG_TAG "mm-jpeg-intf"
+ #include <utils/Log.h>
+ #else
+ #include <stdio.h>
+ #define ALOGE CDBG
+ #endif
+ #undef CDBG
+ #define CDBG(fmt, args...) do{}while(0)
+#else
+ #ifdef _ANDROID_
+ #undef LOG_NIDEBUG
+ #undef LOG_TAG
+ #define LOG_NIDEBUG 0
+ #define LOG_TAG "mm-jpeg-intf"
+ #include <utils/Log.h>
+ #define CDBG(fmt, args...) ALOGE(fmt, ##args)
+ #else
+ #include <stdio.h>
+ #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+ #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+ #endif
+#endif
+
+#ifdef _ANDROID_
+ #define CDBG_HIGH(fmt, args...) ALOGE(fmt, ##args)
+ #define CDBG_ERROR(fmt, args...) ALOGE(fmt, ##args)
+#else
+ #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+ #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_JPEG_DBG_H__ */
diff --git a/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg_interface.h b/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg_interface.h
new file mode 100644
index 0000000..50eaaa5
--- /dev/null
+++ b/camera/QCamera/stack/mm-jpeg-interface/inc/mm_jpeg_interface.h
@@ -0,0 +1,209 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef MM_JPEG_INTERFACE_H_
+#define MM_JPEG_INTERFACE_H_
+#include "QCamera_Intf.h"
+
+typedef struct {
+ int width;
+ int height;
+} image_resolution;
+
+typedef enum {
+ JPEG_SRC_IMAGE_FMT_YUV,
+ JPEG_SRC_IMAGE_FMT_BITSTREAM
+} jpeg_enc_src_img_fmt_t;
+
+typedef enum {
+ JPEG_SRC_IMAGE_TYPE_MAIN,
+ JPEG_SRC_IMAGE_TYPE_THUMB,
+ JPEG_SRC_IMAGE_TYPE_MAX
+} jpeg_enc_src_img_type_t;
+
+typedef struct {
+ int32_t offset_x;
+ int32_t offset_y;
+ int32_t width;
+ int32_t height;
+} image_crop_t;
+
+typedef struct {
+ uint8_t sequence; /*for jpeg bit streams, assembling is based on sequence. sequence starts from 0*/
+ uint8_t *buf_vaddr; /*ptr to buf*/
+ int fd; /*fd of buf*/
+ uint32_t buf_size; /* total size of buf (header + image) */
+ uint32_t data_offset; /*data offset*/
+} src_bitstream_buffer_t;
+
+typedef struct {
+ uint8_t *buf_vaddr; /*ptr to buf*/
+ int fd; /*fd of buf*/
+ cam_frame_len_offset_t offset; /*alway use multi-planar, offset is used to skip the metadata header*/
+} src_image_buffer_t;
+
+typedef enum {
+ MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2,
+ MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2,
+ MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1,
+ MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1,
+ MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2,
+ MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2,
+ MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1,
+ MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1,
+ MM_JPEG_COLOR_FORMAT_RGB565,
+ MM_JPEG_COLOR_FORMAT_RGB888,
+ MM_JPEG_COLOR_FORMAT_RGBa,
+ MM_JPEG_COLOR_FORMAT_BITSTREAM,
+ MM_JPEG_COLOR_FORMAT_MAX
+} mm_jpeg_color_format;
+
+#define MAX_SRC_BUF_NUM 2
+typedef struct {
+ /* src img format: YUV, Bitstream */
+ jpeg_enc_src_img_fmt_t img_fmt;
+
+ /* num of buf in src img */
+ uint8_t num_bufs;
+
+ /* src img bufs */
+ union {
+ src_bitstream_buffer_t bit_stream[MAX_SRC_BUF_NUM];
+ src_image_buffer_t src_image[MAX_SRC_BUF_NUM];
+ };
+
+ /* src img type: main or thumbnail */
+ jpeg_enc_src_img_type_t type;
+
+ /* color format */
+ mm_jpeg_color_format color_format;
+
+ /* src img dimension */
+ image_resolution src_dim;
+
+ /* jpeg output dimension */
+ image_resolution out_dim;
+
+ /* crop information */
+ image_crop_t crop;
+
+ /* jpeg quality: range 0~100 */
+ uint32_t quality;
+} src_image_buffer_info;
+
+typedef struct {
+ uint8_t *buf_vaddr; /*ptr to buf*/
+ int fd; /*fd of buf*/
+ int buf_len;
+} out_image_buffer_info;
+
+typedef struct {
+ /* num of src imgs: e.g. main/thumbnail img
+ * if main img only: src_img_num = 1;
+ * if main+thumbnail: src_img_num = 2;
+ * No support for thumbnail only case */
+ uint8_t src_img_num;
+
+ /* index 0 is always for main image
+ * if thumbnail presented, it will be in index 1 */
+ src_image_buffer_info src_img[JPEG_SRC_IMAGE_TYPE_MAX];
+} src_image_buffer_config;
+
+typedef struct {
+ src_image_buffer_config src_imgs;
+ out_image_buffer_info sink_img;
+} jpeg_image_buffer_config;
+
+typedef struct {
+ /* config for scr images */
+ jpeg_image_buffer_config buf_info;
+
+ /* rotation informaiton */
+ int rotation;
+
+ /* num of exif entries */
+ int exif_numEntries;
+
+ /* buf to exif entries, caller needs to
+ * take care of the memory manage with insider ptr */
+ exif_tags_info_t *exif_data;
+} mm_jpeg_encode_params;
+
+typedef enum {
+ JPEG_JOB_STATUS_DONE = 0,
+ JPEG_JOB_STATUS_ERROR
+} jpeg_job_status_t;
+
+typedef void (*jpeg_encode_callback_t)(jpeg_job_status_t status,
+ uint8_t thumbnailDroppedFlag,
+ uint32_t client_hdl,
+ uint32_t jobId,
+ uint8_t* out_data,
+ uint32_t data_size,
+ void *userData);
+
+typedef struct {
+ mm_jpeg_encode_params encode_parm;
+ jpeg_encode_callback_t jpeg_cb;
+ void* userdata;
+} mm_jpeg_encode_job;
+
+typedef enum {
+ JPEG_JOB_TYPE_ENCODE,
+ //JPEG_JOB_TYPE_DECODE, /*enable decode later*/
+ JPEG_JOB_TYPE_MAX
+} mm_jpeg_job_type_t;
+
+typedef struct {
+ mm_jpeg_job_type_t job_type;
+ union {
+ mm_jpeg_encode_job encode_job;
+ };
+} mm_jpeg_job;
+
+typedef struct {
+ /* start a job -- async call
+ * the result of job (DONE/ERROR) will rcvd through CB */
+ int32_t (* start_job) (uint32_t client_hdl, mm_jpeg_job* job, uint32_t* jobId);
+
+ /* abort a job -- sync call */
+ int32_t (* abort_job) (uint32_t client_hdl, uint32_t jobId);
+
+ /* close a jpeg client -- sync call */
+ int32_t (* close) (uint32_t clientHdl);
+} mm_jpeg_ops_t;
+
+/* open a jpeg client -- sync call
+ * returns client_handle.
+ * failed if client_handle=0
+ * jpeg ops tbl will be filled in if open succeeds */
+uint32_t jpeg_open(mm_jpeg_ops_t *ops);
+
+#endif /* MM_JPEG_INTERFACE_H_ */
+
+
diff --git a/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg.c b/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg.c
new file mode 100644
index 0000000..41fba5a
--- /dev/null
+++ b/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg.c
@@ -0,0 +1,1297 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <semaphore.h>
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+
+/* define max num of supported concurrent jpeg jobs by OMX engine.
+ * Current, only one per time */
+#define NUM_MAX_JPEG_CNCURRENT_JOBS 1
+
+#define INPUT_PORT_MAIN 0
+#define INPUT_PORT_THUMBNAIL 2
+#define OUTPUT_PORT 1
+
+void mm_jpeg_job_wait_for_event(mm_jpeg_obj *my_obj, uint32_t evt_mask);
+void mm_jpeg_job_wait_for_cmd_complete(mm_jpeg_obj *my_obj,
+ int cmd,
+ int status);
+OMX_ERRORTYPE mm_jpeg_etbdone(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_ftbdone(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_BUFFERHEADERTYPE* pBuffer);
+OMX_ERRORTYPE mm_jpeg_handle_omx_event(OMX_HANDLETYPE hComponent,
+ OMX_PTR pAppData,
+ OMX_EVENTTYPE eEvent,
+ OMX_U32 nData1,
+ OMX_U32 nData2,
+ OMX_PTR pEventData);
+/* special queue functions for job queue */
+int32_t mm_jpeg_queue_update_flag(mm_jpeg_queue_t* queue, uint32_t job_id, uint8_t flag);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(mm_jpeg_queue_t* queue, uint32_t client_hdl);
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(mm_jpeg_queue_t* queue, uint32_t job_id);
+
+/* Acquire the QC OMX jpeg encoder component handle, registering our
+ * three IL callbacks, then initialize the OMX core. Returns 0 on
+ * success; on OMX_Init failure the handle is freed before returning.
+ * NOTE(review): OMX_GetHandle is invoked before OMX_Init, which is the
+ * reverse of the usual OpenMAX IL ordering -- confirm the QC OMX core
+ * tolerates this. */
+int32_t mm_jpeg_omx_load(mm_jpeg_obj* my_obj)
+{
+  int32_t rc = 0;
+  OMX_CALLBACKTYPE callbacks;
+
+  callbacks.EmptyBufferDone = mm_jpeg_etbdone;
+  callbacks.FillBufferDone = mm_jpeg_ftbdone;
+  callbacks.EventHandler = mm_jpeg_handle_omx_event;
+  /* my_obj is passed as pAppData so callbacks can recover the object */
+  rc = OMX_GetHandle(&my_obj->omx_handle,
+                     "OMX.qcom.image.jpeg.encoder",
+                     (void*)my_obj,
+                     &callbacks);
+
+  if (0 != rc) {
+    CDBG_ERROR("%s : OMX_GetHandle failed (%d)",__func__, rc);
+    return rc;
+  }
+
+  rc = OMX_Init();
+  if (0 != rc) {
+    CDBG_ERROR("%s : OMX_Init failed (%d)",__func__, rc);
+    OMX_FreeHandle(my_obj->omx_handle);
+  }
+
+  return rc;
+}
+
+/* Tear down the OMX core and release the encoder handle.
+ * Returns the OMX_Deinit result; OMX_FreeHandle's result is ignored. */
+int32_t mm_jpeg_omx_unload(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+  rc = OMX_Deinit();
+  OMX_FreeHandle(my_obj->omx_handle);
+
+  return rc;
+}
+
+/* Abort an OMX-ongoing job: flush the component, wait for a terminal
+ * event (done/abort/error), drive the component back to Loaded, and
+ * free every OMX buffer header this job registered (sources and sink).
+ * Caller must hold job_lock (see mm_jpeg_abort_job / mm_jpeg_close). */
+int32_t mm_jpeg_omx_abort_job(mm_jpeg_obj *my_obj, mm_jpeg_job_entry* job_entry)
+{
+  int32_t rc = 0;
+  uint8_t i, j;
+
+  OMX_SendCommand(my_obj->omx_handle, OMX_CommandFlush, 0, NULL);
+  /* any terminal jpeg event unblocks us, not just ABORT */
+  mm_jpeg_job_wait_for_event(my_obj,
+    MM_JPEG_EVENT_MASK_JPEG_DONE|MM_JPEG_EVENT_MASK_JPEG_ABORT|MM_JPEG_EVENT_MASK_JPEG_ERROR);
+  CDBG("%s:waitForEvent: OMX_CommandFlush: DONE", __func__);
+
+  /* NOTE(review): state transitions are fire-and-forget here, no wait
+   * for CmdComplete before freeing buffers -- verify against OMX core */
+  OMX_SendCommand(my_obj->omx_handle, OMX_CommandStateSet, OMX_StateIdle, NULL);
+  OMX_SendCommand(my_obj->omx_handle, OMX_CommandStateSet, OMX_StateLoaded, NULL);
+
+  /* free buffers used in OMX processing */
+  for (i = 0; i < JPEG_SRC_IMAGE_TYPE_MAX; i++) {
+    for (j = 0; j < job_entry->src_bufs[i].num_bufs; j++) {
+      if (NULL != job_entry->src_bufs[i].bufs[j].buf_header) {
+        OMX_FreeBuffer(my_obj->omx_handle,
+                       job_entry->src_bufs[i].bufs[j].portIdx,
+                       job_entry->src_bufs[i].bufs[j].buf_header);
+      }
+    }
+  }
+  OMX_FreeBuffer(my_obj->omx_handle,
+                 job_entry->sink_buf.portIdx,
+                 job_entry->sink_buf.buf_header);
+
+  return rc;
+}
+
+/* TODO: needs revisit after omx lib supports multi src buffers */
+/* Push the per-buffer Y/CbCr plane offsets of the main image to the
+ * component via the QC buffer_offset extension. YUV buffers use the
+ * single-/multi-planar offset info; bitstream buffers only carry a
+ * data offset and total size. Unknown formats are silently skipped.
+ * NOTE(review): OMX_GetExtensionIndex is loop-invariant and could be
+ * hoisted; also with only one OMX parameter slot, each iteration
+ * overwrites the previous buffer's offsets. */
+int32_t mm_jpeg_omx_config_main_buffer_offset(mm_jpeg_obj* my_obj, src_image_buffer_info *src_buf)
+{
+  int32_t rc = 0;
+  uint8_t i;
+  OMX_INDEXTYPE buf_offset_idx;
+  omx_jpeg_buffer_offset buffer_offset;
+
+  for (i = 0; i < src_buf->num_bufs; i++) {
+    OMX_GetExtensionIndex(my_obj->omx_handle,
+                          "omx.qcom.jpeg.exttype.buffer_offset",
+                          &buf_offset_idx);
+    memset(&buffer_offset, 0, sizeof(buffer_offset));
+
+    switch (src_buf->img_fmt) {
+    case JPEG_SRC_IMAGE_FMT_YUV:
+      if (1 == src_buf->src_image[i].offset.num_planes) {
+        /* single-planar: explicit y/cbcr offsets */
+        buffer_offset.yOffset =
+          src_buf->src_image[i].offset.sp.y_offset;
+        buffer_offset.cbcrOffset =
+          src_buf->src_image[i].offset.sp.cbcr_offset;
+        buffer_offset.totalSize =
+          src_buf->src_image[i].offset.sp.len;
+      } else {
+        /* multi-planar: plane 0 = Y, plane 1 = CbCr */
+        buffer_offset.yOffset =
+          src_buf->src_image[i].offset.mp[0].offset;
+        buffer_offset.cbcrOffset =
+          src_buf->src_image[i].offset.mp[1].offset;
+        buffer_offset.totalSize =
+          src_buf->src_image[i].offset.frame_len;
+      }
+      CDBG("%s: idx=%d, yOffset =%d, cbcrOffset =%d, totalSize = %d\n",
+           __func__, i, buffer_offset.yOffset, buffer_offset.cbcrOffset, buffer_offset.totalSize);
+      OMX_SetParameter(my_obj->omx_handle, buf_offset_idx, &buffer_offset);
+      break;
+    case JPEG_SRC_IMAGE_FMT_BITSTREAM:
+      /* TODO: need visit here when bit stream is supported */
+      buffer_offset.yOffset =
+        src_buf->bit_stream[i].data_offset;
+      buffer_offset.totalSize =
+        src_buf->bit_stream[i].buf_size;
+      CDBG("%s: idx=%d, yOffset =%d, cbcrOffset =%d, totalSize = %d\n",
+           __func__, i, buffer_offset.yOffset, buffer_offset.cbcrOffset, buffer_offset.totalSize);
+      OMX_SetParameter(my_obj->omx_handle, buf_offset_idx, &buffer_offset);
+      break;
+    default:
+      break;
+    }
+  }
+
+  return rc;
+}
+
+/* TODO: needs revisit after omx lib supports multi src buffers */
+/* Configure an OMX input port's frame dimensions and buffer size from
+ * the source buffer description. Stride/slice are taken equal to
+ * width/height (no padding). For each buffer the current port
+ * definition is read, patched, and written back. */
+int32_t mm_jpeg_omx_config_port(mm_jpeg_obj* my_obj, src_image_buffer_info *src_buf, int port_idx)
+{
+  int32_t rc = 0;
+  uint8_t i;
+  OMX_PARAM_PORTDEFINITIONTYPE input_port;
+
+  for (i = 0; i < src_buf->num_bufs; i++) {
+    memset(&input_port, 0, sizeof(input_port));
+    input_port.nPortIndex = port_idx;
+    OMX_GetParameter(my_obj->omx_handle, OMX_IndexParamPortDefinition, &input_port);
+    input_port.format.image.nFrameWidth = src_buf->src_dim.width;
+    input_port.format.image.nFrameHeight =src_buf->src_dim.height;
+    input_port.format.image.nStride = src_buf->src_dim.width;
+    input_port.format.image.nSliceHeight = src_buf->src_dim.height;
+    /* buffer size depends on whether the source is raw YUV or bitstream */
+    switch (src_buf->img_fmt) {
+    case JPEG_SRC_IMAGE_FMT_YUV:
+      input_port.nBufferSize = src_buf->src_image[i].offset.frame_len;
+      break;
+    case JPEG_SRC_IMAGE_FMT_BITSTREAM:
+      input_port.nBufferSize = src_buf->bit_stream[i].buf_size;
+      break;
+    }
+
+    OMX_SetParameter(my_obj->omx_handle, OMX_IndexParamPortDefinition, &input_port);
+  }
+
+  return rc;
+}
+
+/* Translate the mm-jpeg interface color format enum to the OMX jpeg
+ * component's enum. Unknown inputs map to OMX_JPEG_COLOR_FORMAT_MAX. */
+omx_jpeg_color_format map_jpeg_format(mm_jpeg_color_format color_fmt)
+{
+  switch (color_fmt) {
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V2:
+    return OMX_YCRCBLP_H2V2;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V2:
+    return OMX_YCBCRLP_H2V2;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H2V1:
+    return OMX_YCRCBLP_H2V1;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H2V1:
+    return OMX_YCBCRLP_H2V1;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V2:
+    return OMX_YCRCBLP_H1V2;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V2:
+    return OMX_YCBCRLP_H1V2;
+  case MM_JPEG_COLOR_FORMAT_YCRCBLP_H1V1:
+    return OMX_YCRCBLP_H1V1;
+  case MM_JPEG_COLOR_FORMAT_YCBCRLP_H1V1:
+    return OMX_YCBCRLP_H1V1;
+  case MM_JPEG_COLOR_FORMAT_RGB565:
+    return OMX_RGB565;
+  case MM_JPEG_COLOR_FORMAT_RGB888:
+    return OMX_RGB888;
+  case MM_JPEG_COLOR_FORMAT_RGBa:
+    return OMX_RGBa;
+  case MM_JPEG_COLOR_FORMAT_BITSTREAM:
+    /* TODO: need to change to new bitstream value once omx interface changes */
+    return OMX_JPEG_BITSTREAM_H2V2;
+  default:
+    return OMX_JPEG_COLOR_FORMAT_MAX;
+  }
+}
+
+/* Set the component's color-format preferences (main image and, when a
+ * second source image is present, the thumbnail) through the QC
+ * user_preferences extension. Always returns 0. */
+int32_t mm_jpeg_omx_config_user_preference(mm_jpeg_obj* my_obj, mm_jpeg_encode_job* job)
+{
+  int32_t rc = 0;
+  OMX_INDEXTYPE user_pref_idx;
+  omx_jpeg_user_preferences user_preferences;
+
+  memset(&user_preferences, 0, sizeof(user_preferences));
+  user_preferences.color_format =
+    map_jpeg_format(job->encode_parm.buf_info.src_imgs.src_img[JPEG_SRC_IMAGE_TYPE_MAIN].color_format);
+  /* src_img_num > 1 means a thumbnail source image exists */
+  if (job->encode_parm.buf_info.src_imgs.src_img_num > 1) {
+    user_preferences.thumbnail_color_format =
+      map_jpeg_format(job->encode_parm.buf_info.src_imgs.src_img[JPEG_SRC_IMAGE_TYPE_THUMB].color_format);
+  }
+  OMX_GetExtensionIndex(my_obj->omx_handle,
+                        "omx.qcom.jpeg.exttype.user_preferences",
+                        &user_pref_idx);
+  CDBG("%s:User Preferences: color_format %d, thumbnail_color_format = %d",
+       __func__, user_preferences.color_format, user_preferences.thumbnail_color_format);
+  OMX_SetParameter(my_obj->omx_handle, user_pref_idx, &user_preferences);
+  return rc;
+}
+
+/* Configure the thumbnail path: input port dimensions, crop/scale
+ * geometry, and thumbnail quality, all via QC extensions.
+ * Returns 0 on success, non-zero on port-config failure or an invalid
+ * crop rectangle (zero-sized or extending past the source). */
+int32_t mm_jpeg_omx_config_thumbnail(mm_jpeg_obj* my_obj, mm_jpeg_encode_job* job)
+{
+  int32_t rc = -1;
+  OMX_INDEXTYPE thumb_idx, q_idx;
+  omx_jpeg_thumbnail thumbnail;
+  omx_jpeg_thumbnail_quality quality;
+
+  src_image_buffer_info *src_buf =
+    &(job->encode_parm.buf_info.src_imgs.src_img[JPEG_SRC_IMAGE_TYPE_THUMB]);
+
+  /* config port */
+  rc = mm_jpeg_omx_config_port(my_obj, src_buf, INPUT_PORT_THUMBNAIL);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config port failed", __func__);
+    return rc;
+  }
+
+  /* check crop boundary */
+  if ((src_buf->crop.width == 0) || (src_buf->crop.height == 0) ||
+      (src_buf->crop.width + src_buf->crop.offset_x > src_buf->src_dim.width) ||
+      (src_buf->crop.height + src_buf->crop.offset_y > src_buf->src_dim.height)) {
+    CDBG_ERROR("%s: invalid crop boundary (%d, %d) offset (%d, %d) out of (%d, %d)",
+               __func__,
+               src_buf->crop.width,
+               src_buf->crop.height,
+               src_buf->crop.offset_x,
+               src_buf->crop.offset_y,
+               src_buf->src_dim.width,
+               src_buf->src_dim.height);
+    return rc;
+  }
+
+  memset(&thumbnail, 0, sizeof(thumbnail));
+
+  /* thumbnail crop info; CEILING2 rounds up to even dimensions */
+  thumbnail.cropWidth = CEILING2(src_buf->crop.width);
+  thumbnail.cropHeight = CEILING2(src_buf->crop.height);
+  thumbnail.left = src_buf->crop.offset_x;
+  thumbnail.top = src_buf->crop.offset_y;
+
+  /* thumbnail output dimension */
+  thumbnail.width = src_buf->out_dim.width;
+  thumbnail.height = src_buf->out_dim.height;
+
+  /* set scaling flag when cropped or crop size differs from output size */
+  if (thumbnail.left > 0 || thumbnail.top > 0 ||
+      src_buf->crop.width != src_buf->out_dim.width ||
+      src_buf->crop.height != src_buf->out_dim.height) {
+    thumbnail.scaling = 1;
+  }
+
+  /* set omx thumbnail info */
+  OMX_GetExtensionIndex(my_obj->omx_handle, "omx.qcom.jpeg.exttype.thumbnail", &thumb_idx);
+  OMX_SetParameter(my_obj->omx_handle, thumb_idx, &thumbnail);
+
+  OMX_GetExtensionIndex(my_obj->omx_handle, "omx.qcom.jpeg.exttype.thumbnail_quality", &q_idx);
+  OMX_GetParameter(my_obj->omx_handle, q_idx, &quality);
+  quality.nQFactor = src_buf->quality;
+  OMX_SetParameter(my_obj->omx_handle, q_idx, &quality);
+
+  rc = 0;
+  return rc;
+}
+
+/* Configure crop (input) and optional scale (output) rectangles for
+ * the main image on the OMX output port.
+ * Returns 0 on success, -1 when the crop rectangle overruns the source
+ * dimensions or no crop information was provided. */
+int32_t mm_jpeg_omx_config_main_crop(mm_jpeg_obj* my_obj, src_image_buffer_info *src_buf)
+{
+  int32_t rc = 0;
+  OMX_CONFIG_RECTTYPE rect_type;
+
+  /* error check first */
+  if (src_buf->crop.width + src_buf->crop.offset_x > src_buf->src_dim.width ||
+      src_buf->crop.height + src_buf->crop.offset_y > src_buf->src_dim.height) {
+    CDBG_ERROR("%s: invalid crop boundary (%d, %d) out of (%d, %d)", __func__,
+               src_buf->crop.width + src_buf->crop.offset_x,
+               src_buf->crop.height + src_buf->crop.offset_y,
+               src_buf->src_dim.width,
+               src_buf->src_dim.height);
+    return -1;
+  }
+
+  /* Bug fix: condition previously tested crop.width twice; a zero
+   * crop.height must also be rejected. */
+  if (src_buf->crop.width && src_buf->crop.height) {
+    /* Scaler information; CEILING2 rounds up to even dimensions */
+    memset(&rect_type, 0, sizeof(rect_type));
+    rect_type.nWidth = CEILING2(src_buf->crop.width);
+    /* Bug fix: nHeight was computed from crop.width. */
+    rect_type.nHeight = CEILING2(src_buf->crop.height);
+    rect_type.nLeft = src_buf->crop.offset_x;
+    rect_type.nTop = src_buf->crop.offset_y;
+    rect_type.nPortIndex = OUTPUT_PORT;
+    OMX_SetConfig(my_obj->omx_handle, OMX_IndexConfigCommonInputCrop, &rect_type);
+
+    /* only program the output crop when an output size was requested */
+    if (src_buf->out_dim.width && src_buf->out_dim.height) {
+      memset(&rect_type, 0, sizeof(rect_type));
+      rect_type.nWidth = src_buf->out_dim.width;
+      rect_type.nHeight = src_buf->out_dim.height;
+      rect_type.nPortIndex = OUTPUT_PORT;
+      OMX_SetConfig(my_obj->omx_handle, OMX_IndexConfigCommonOutputCrop, &rect_type);
+    }
+
+  } else {
+    CDBG_ERROR("%s: There is no main image scaling information", __func__);
+    rc = -1;
+  }
+
+  return rc;
+}
+
+/* Full main-image configuration: input port, plane buffer offsets,
+ * crop/scale, and Q-factor (quality). Stops and returns the first
+ * non-zero error; 0 on success. */
+int32_t mm_jpeg_omx_config_main(mm_jpeg_obj* my_obj, mm_jpeg_encode_job* job)
+{
+  int32_t rc = 0;
+  src_image_buffer_info *src_buf =
+    &(job->encode_parm.buf_info.src_imgs.src_img[JPEG_SRC_IMAGE_TYPE_MAIN]);
+  OMX_IMAGE_PARAM_QFACTORTYPE q_factor;
+
+  /* config port */
+  rc = mm_jpeg_omx_config_port(my_obj, src_buf, INPUT_PORT_MAIN);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config port failed", __func__);
+    return rc;
+  }
+
+  /* config buffer offset */
+  rc = mm_jpeg_omx_config_main_buffer_offset(my_obj, src_buf);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config buffer offset failed", __func__);
+    return rc;
+  }
+
+  /* config crop */
+  rc = mm_jpeg_omx_config_main_crop(my_obj, src_buf);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config crop failed", __func__);
+    return rc;
+  }
+
+  /* set quality: read-modify-write the Q factor on the main input port */
+  memset(&q_factor, 0, sizeof(q_factor));
+  q_factor.nPortIndex = INPUT_PORT_MAIN;
+  OMX_GetParameter(my_obj->omx_handle, OMX_IndexParamQFactor, &q_factor);
+  q_factor.nQFactor = src_buf->quality;
+  OMX_SetParameter(my_obj->omx_handle, OMX_IndexParamQFactor, &q_factor);
+
+  return rc;
+}
+
+/* Job-wide configuration shared by main image and thumbnail:
+ * color-format preferences, rotation on the output port, and EXIF tags
+ * (one OMX_SetParameter per tag via the QC exif extension). */
+int32_t mm_jpeg_omx_config_common(mm_jpeg_obj* my_obj, mm_jpeg_encode_job* job)
+{
+  int32_t rc;
+  int i;
+  OMX_INDEXTYPE exif_idx;
+  omx_jpeg_exif_info_tag tag;
+  OMX_CONFIG_ROTATIONTYPE rotate;
+
+  /* config user preferences */
+  rc = mm_jpeg_omx_config_user_preference(my_obj, job);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config user preferences failed", __func__);
+    return rc;
+  }
+
+  /* set rotation */
+  memset(&rotate, 0, sizeof(rotate));
+  rotate.nPortIndex = OUTPUT_PORT;
+  rotate.nRotation = job->encode_parm.rotation;
+  OMX_SetConfig(my_obj->omx_handle, OMX_IndexConfigCommonRotate, &rotate);
+  CDBG("%s: Set rotation to %d\n", __func__, job->encode_parm.rotation);
+
+  /* set exif tags, one tag per SetParameter call */
+  OMX_GetExtensionIndex(my_obj->omx_handle, "omx.qcom.jpeg.exttype.exif", &exif_idx);
+  for(i = 0; i < job->encode_parm.exif_numEntries; i++) {
+    memcpy(&tag, job->encode_parm.exif_data + i, sizeof(omx_jpeg_exif_info_tag));
+    OMX_SetParameter(my_obj->omx_handle, exif_idx, &tag);
+  }
+
+  return rc;
+}
+
+/* Register the caller-allocated source buffers with OMX via
+ * OMX_UseBuffer on the given port, recording each resulting buffer
+ * header (and its port) in omx_src_buf for later free/submit.
+ * The ion/pmem fd is passed through pAppPrivate (omx_jpeg_pmem_info).
+ * Returns 0 on success, -1 on an unrecognized image format. */
+int32_t mm_jpeg_omx_use_buf(mm_jpeg_obj* my_obj,
+                            src_image_buffer_info *src_buf,
+                            mm_jpeg_omx_src_buf* omx_src_buf,
+                            int port_idx)
+{
+  int32_t rc = 0;
+  uint8_t i;
+  omx_jpeg_pmem_info pmem_info;
+
+  omx_src_buf->num_bufs = src_buf->num_bufs;
+  for (i = 0; i < src_buf->num_bufs; i++) {
+    memset(&pmem_info, 0, sizeof(pmem_info));
+    switch (src_buf->img_fmt) {
+    case JPEG_SRC_IMAGE_FMT_YUV:
+      pmem_info.fd = src_buf->src_image[i].fd;
+      pmem_info.offset = 0;
+      omx_src_buf->bufs[i].portIdx = port_idx;
+      OMX_UseBuffer(my_obj->omx_handle,
+                    &omx_src_buf->bufs[i].buf_header,
+                    port_idx,
+                    &pmem_info,
+                    src_buf->src_image[i].offset.frame_len,
+                    (void *)src_buf->src_image[i].buf_vaddr);
+      break;
+    case JPEG_SRC_IMAGE_FMT_BITSTREAM:
+      pmem_info.fd = src_buf->bit_stream[i].fd;
+      pmem_info.offset = 0;
+      omx_src_buf->bufs[i].portIdx = port_idx;
+      OMX_UseBuffer(my_obj->omx_handle,
+                    &omx_src_buf->bufs[i].buf_header,
+                    port_idx,
+                    &pmem_info,
+                    src_buf->bit_stream[i].buf_size,
+                    (void *)src_buf->bit_stream[i].buf_vaddr);
+      break;
+    default:
+      rc = -1;
+      break;
+    }
+  }
+
+  return rc;
+}
+
+/* Drive one full OMX encode: configure main/thumbnail/common params,
+ * register all input and output buffers, transition the component to
+ * Executing, then submit every input (EmptyThisBuffer) and the sink
+ * (FillThisBuffer). Completion is asynchronous -- mm_jpeg_ftbdone
+ * fires when the JPEG is written. Returns 0 once the job is submitted. */
+int32_t mm_jpeg_omx_encode(mm_jpeg_obj* my_obj, mm_jpeg_job_entry* job_entry)
+{
+  int32_t rc = 0;
+  uint8_t i;
+  mm_jpeg_encode_job* job = &job_entry->job.encode_job;
+  /* more than one source image means a thumbnail is present */
+  uint8_t has_thumbnail = job->encode_parm.buf_info.src_imgs.src_img_num > 1? 1 : 0;
+  src_image_buffer_info *src_buf[JPEG_SRC_IMAGE_TYPE_MAX];
+  mm_jpeg_omx_src_buf *omx_src_buf[JPEG_SRC_IMAGE_TYPE_MAX];
+
+  /* config main img */
+  rc = mm_jpeg_omx_config_main(my_obj, job);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config main img failed", __func__);
+    return rc;
+  }
+
+  /* config thumbnail */
+  rc = mm_jpeg_omx_config_thumbnail(my_obj, job);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config thumbnail img failed", __func__);
+    return rc;
+  }
+
+  /* common config */
+  rc = mm_jpeg_omx_config_common(my_obj, job);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config common failed", __func__);
+    return rc;
+  }
+
+  /* config input omx buffer for main input */
+  src_buf[JPEG_SRC_IMAGE_TYPE_MAIN] = &(job->encode_parm.buf_info.src_imgs.src_img[JPEG_SRC_IMAGE_TYPE_MAIN]);
+  omx_src_buf[JPEG_SRC_IMAGE_TYPE_MAIN] = &job_entry->src_bufs[JPEG_SRC_IMAGE_TYPE_MAIN];
+  rc = mm_jpeg_omx_use_buf(my_obj,
+                           src_buf[JPEG_SRC_IMAGE_TYPE_MAIN],
+                           omx_src_buf[JPEG_SRC_IMAGE_TYPE_MAIN],
+                           INPUT_PORT_MAIN);
+  if (0 != rc) {
+    CDBG_ERROR("%s: config main input omx buffer failed", __func__);
+    return rc;
+  }
+
+  /* config input omx buffer for thumbnail input if there is thumbnail */
+  if (has_thumbnail) {
+    src_buf[JPEG_SRC_IMAGE_TYPE_THUMB] = &(job->encode_parm.buf_info.src_imgs.src_img[JPEG_SRC_IMAGE_TYPE_THUMB]);
+    omx_src_buf[JPEG_SRC_IMAGE_TYPE_THUMB] = &job_entry->src_bufs[JPEG_SRC_IMAGE_TYPE_THUMB];
+    rc = mm_jpeg_omx_use_buf(my_obj,
+                             src_buf[JPEG_SRC_IMAGE_TYPE_THUMB],
+                             omx_src_buf[JPEG_SRC_IMAGE_TYPE_THUMB],
+                             INPUT_PORT_THUMBNAIL);
+    if (0 != rc) {
+      CDBG_ERROR("%s: config thumbnail input omx buffer failed", __func__);
+      return rc;
+    }
+  }
+
+  /* config output omx buffer (no pmem info: vaddr only) */
+  job_entry->sink_buf.portIdx = OUTPUT_PORT;
+  OMX_UseBuffer(my_obj->omx_handle,
+                &job_entry->sink_buf.buf_header,
+                job_entry->sink_buf.portIdx,
+                NULL,
+                job->encode_parm.buf_info.sink_img.buf_len,
+                (void *)job->encode_parm.buf_info.sink_img.buf_vaddr);
+
+  /* wait for OMX state ready (Loaded->Idle completes once buffers are set) */
+  mm_jpeg_job_wait_for_cmd_complete(my_obj, OMX_CommandStateSet, OMX_StateIdle);
+  CDBG("%s: State changed to OMX_StateIdle\n", __func__);
+
+  /* start OMX encoding by sending executing cmd */
+  OMX_SendCommand(my_obj->omx_handle, OMX_CommandStateSet, OMX_StateExecuting, NULL);
+  mm_jpeg_job_wait_for_cmd_complete(my_obj, OMX_CommandStateSet, OMX_StateExecuting);
+
+  /* start input feeding and output writing */
+  for (i = 0; i < job_entry->src_bufs[JPEG_SRC_IMAGE_TYPE_MAIN].num_bufs; i++) {
+    OMX_EmptyThisBuffer(my_obj->omx_handle,
+                        job_entry->src_bufs[JPEG_SRC_IMAGE_TYPE_MAIN].bufs[i].buf_header);
+  }
+  if (has_thumbnail) {
+    for (i = 0; i < job_entry->src_bufs[JPEG_SRC_IMAGE_TYPE_THUMB].num_bufs; i++) {
+      OMX_EmptyThisBuffer(my_obj->omx_handle,
+                          job_entry->src_bufs[JPEG_SRC_IMAGE_TYPE_THUMB].bufs[i].buf_header);
+    }
+  }
+  OMX_FillThisBuffer(my_obj->omx_handle, job_entry->sink_buf.buf_header);
+
+  return rc;
+}
+
+/* Detached worker that delivers the user callback for one finished
+ * (or failed) job. The node is parked in cb_q while the callback runs
+ * so mm_jpeg_abort_job/close can find and join it, then it is removed
+ * and freed here. Thread arg is the mm_jpeg_job_q_node_t*. */
+static void *mm_jpeg_notify_thread(void *data)
+{
+  mm_jpeg_job_q_node_t* job_node = (mm_jpeg_job_q_node_t *)data;
+  mm_jpeg_job_entry * job_entry = &job_node->entry;
+  mm_jpeg_obj* my_obj = (mm_jpeg_obj *)job_entry->jpeg_obj;
+  void* node = NULL;
+  int32_t rc = 0;
+
+  if (NULL == my_obj) {
+    CDBG_ERROR("%s: jpeg obj is NULL", __func__);
+    return NULL;
+  }
+
+  /* Add to cb queue so the job stays discoverable during the callback */
+  rc = mm_jpeg_queue_enq(&my_obj->cb_q, data);
+  if (0 != rc) {
+    CDBG_ERROR("%s: enqueue into cb_q failed", __func__);
+    return NULL;
+  }
+
+  /* call cb */
+  if (NULL != job_entry->job.encode_job.jpeg_cb) {
+    /* has callback, send CB */
+    job_entry->job.encode_job.jpeg_cb(job_entry->job_status,
+                                      job_entry->thumbnail_dropped,
+                                      job_entry->client_hdl,
+                                      job_entry->jobId,
+                                      job_entry->job.encode_job.encode_parm.buf_info.sink_img.buf_vaddr,
+                                      job_entry->jpeg_size,
+                                      job_entry->job.encode_job.userdata);
+  } else {
+    CDBG_ERROR("%s: no cb provided, no action", __func__);
+  }
+
+  /* Remove from cb queue; NULL means an abort/close already took it */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->cb_q, job_entry->jobId);
+  if (NULL != node) {
+    free(node);
+  }
+
+  return NULL;
+}
+
+/* process encoding job */
+/* Submit one encode job to OMX. On success the node moves to the
+ * ongoing queue (ftbdone will dequeue it); on failure the error is
+ * reported through the notify thread, or the node is freed when no
+ * callback was registered. Caller holds job_lock. */
+int32_t mm_jpeg_process_encoding_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_entry* job_entry = &job_node->entry;
+
+  /* call OMX_Encode */
+  rc = mm_jpeg_omx_encode(my_obj, job_entry);
+  if (0 == rc) {
+    /* sent encode cmd to OMX, queue job into ongoing queue */
+    rc = mm_jpeg_queue_enq(&my_obj->ongoing_job_q, job_node);
+  } else {
+    /* OMX encode failed, notify error through callback */
+    job_entry->job_status = JPEG_JOB_STATUS_ERROR;
+    if (NULL != job_entry->job.encode_job.jpeg_cb) {
+      /* has callback, create a thread to send CB */
+      pthread_create(&job_entry->cb_pid,
+                     NULL,
+                     mm_jpeg_notify_thread,
+                     (void *)job_node);
+
+    } else {
+      CDBG_ERROR("%s: no cb provided, return here", __func__);
+      free(job_node);
+    }
+  }
+
+  return rc;
+}
+/* process job (encoding/decoding) */
+/* Dispatch a dequeued job node by job_type. Only encode is wired up;
+ * anything else returns -1 (caller then frees the node). */
+int32_t mm_jpeg_process_job(mm_jpeg_obj *my_obj, mm_jpeg_job_q_node_t* job_node)
+{
+  int32_t rc = 0;
+
+  switch (job_node->entry.job.job_type) {
+  case JPEG_JOB_TYPE_ENCODE:
+    rc = mm_jpeg_process_encoding_job(my_obj, job_node);
+    break;
+  default:
+    CDBG_ERROR("%s: job type not supported (%d)",
+               __func__, job_node->entry.job.job_type);
+    rc = -1;
+    break;
+  }
+
+  return rc;
+}
+
+/* Job manager thread. Blocks on job_sem; on each wakeup, if the
+ * ongoing queue is below NUM_MAX_JPEG_CNCURRENT_JOBS, dequeues one
+ * command node under job_lock and either processes the job or exits on
+ * MM_JPEG_CMD_TYPE_EXIT. Thread arg is the owning mm_jpeg_obj*. */
+static void *mm_jpeg_jobmgr_thread(void *data)
+{
+  int rc = 0;
+  int running = 1;
+  uint32_t num_ongoing_jobs = 0;
+  mm_jpeg_obj *my_obj = (mm_jpeg_obj*)data;
+  /* Bug fix: data is a mm_jpeg_obj* (see mm_jpeg_jobmgr_thread_launch),
+   * not a mm_jpeg_job_cmd_thread_t*; the old direct cast was only
+   * correct if job_mgr happened to be the struct's first member.
+   * Take the member's address explicitly instead. */
+  mm_jpeg_job_cmd_thread_t *cmd_thread = &my_obj->job_mgr;
+  mm_jpeg_job_q_node_t* node = NULL;
+
+  do {
+    do {
+      rc = sem_wait(&cmd_thread->job_sem);
+      /* retry on transient failures (e.g. EINTR); EINVAL also retries
+       * here -- any other error terminates the thread */
+      if (rc != 0 && errno != EINVAL) {
+        CDBG_ERROR("%s: sem_wait error (%s)",
+                   __func__, strerror(errno));
+        return NULL;
+      }
+    } while (rc != 0);
+
+    /* check ongoing q size; at capacity, swallow the wakeup */
+    num_ongoing_jobs = mm_jpeg_queue_get_size(&my_obj->ongoing_job_q);
+    if (num_ongoing_jobs >= NUM_MAX_JPEG_CNCURRENT_JOBS) {
+      continue;
+    }
+
+    pthread_mutex_lock(&my_obj->job_lock);
+    /* can go ahead with new work */
+    node = (mm_jpeg_job_q_node_t*)mm_jpeg_queue_deq(&cmd_thread->job_queue);
+    if (node != NULL) {
+      switch (node->type) {
+      case MM_JPEG_CMD_TYPE_JOB:
+        rc = mm_jpeg_process_job(my_obj, node);
+        if (0 != rc) {
+          /* free node in error case */
+          free(node);
+        }
+        break;
+      case MM_JPEG_CMD_TYPE_EXIT:
+      default:
+        /* free node */
+        free(node);
+        /* set running flag to false */
+        running = 0;
+        break;
+      }
+    }
+    pthread_mutex_unlock(&my_obj->job_lock);
+
+  } while (running);
+  return NULL;
+}
+
+/* Initialize the job semaphore and todo queue, then start the job
+ * manager thread. NOTE(review): pthread_create's return value is not
+ * checked; rc is always 0. */
+int32_t mm_jpeg_jobmgr_thread_launch(mm_jpeg_obj * my_obj)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_cmd_thread_t * job_mgr = &my_obj->job_mgr;
+
+  sem_init(&job_mgr->job_sem, 0, 0);
+  mm_jpeg_queue_init(&job_mgr->job_queue);
+
+  /* launch the thread; the whole my_obj is passed as the thread arg */
+  pthread_create(&job_mgr->pid,
+                 NULL,
+                 mm_jpeg_jobmgr_thread,
+                 (void *)my_obj);
+  return rc;
+}
+
+/* Stop the job manager thread: enqueue an EXIT command, wake the
+ * thread, join it, then tear down its queue and semaphore.
+ * Returns 0, or -1 when the EXIT node cannot be allocated. */
+int32_t mm_jpeg_jobmgr_thread_release(mm_jpeg_obj * my_obj)
+{
+  int32_t rc = 0;
+  mm_jpeg_job_cmd_thread_t * cmd_thread = &my_obj->job_mgr;
+  mm_jpeg_job_q_node_t* node =
+    (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    CDBG_ERROR("%s: No memory for mm_jpeg_job_q_node_t", __func__);
+    return -1;
+  }
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->type = MM_JPEG_CMD_TYPE_EXIT;
+
+  mm_jpeg_queue_enq(&cmd_thread->job_queue, node);
+  sem_post(&cmd_thread->job_sem);
+
+  /* wait until cmd thread exits */
+  if (pthread_join(cmd_thread->pid, NULL) != 0) {
+    CDBG("%s: pthread dead already\n", __func__);
+  }
+  mm_jpeg_queue_deinit(&cmd_thread->job_queue);
+
+  sem_destroy(&cmd_thread->job_sem);
+  memset(cmd_thread, 0, sizeof(mm_jpeg_job_cmd_thread_t));
+  return rc;
+}
+
+/* One-time initialization of the jpeg object: locks/condvar, ongoing
+ * queue, job manager thread, and the OMX engine. On OMX load failure
+ * everything is rolled back. NOTE(review): the rc values from
+ * queue_init and thread_launch are overwritten without being checked. */
+int32_t mm_jpeg_init(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+
+  /* init locks */
+  pthread_mutex_init(&my_obj->job_lock, NULL);
+  pthread_mutex_init(&my_obj->omx_evt_lock, NULL);
+  pthread_cond_init(&my_obj->omx_evt_cond, NULL);
+
+  /* init ongoing job queue */
+  rc = mm_jpeg_queue_init(&my_obj->ongoing_job_q);
+
+  /* init job semaphore and launch jobmgr thread */
+  CDBG("%s : Launch jobmgr thread",__func__);
+  rc = mm_jpeg_jobmgr_thread_launch(my_obj);
+
+  /* load OMX */
+  rc = mm_jpeg_omx_load(my_obj);
+  if (0 != rc) {
+    /* roll back in error case */
+    mm_jpeg_jobmgr_thread_release(my_obj);
+    mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+    pthread_mutex_destroy(&my_obj->job_lock);
+    pthread_mutex_destroy(&my_obj->omx_evt_lock);
+    pthread_cond_destroy(&my_obj->omx_evt_cond);
+  }
+
+  return rc;
+}
+
+/* Tear down in reverse of mm_jpeg_init: OMX engine, job manager
+ * thread, ongoing queue, then the locks. NOTE(review): each step's rc
+ * overwrites the previous one; only the queue deinit result is
+ * ultimately returned. */
+int32_t mm_jpeg_deinit(mm_jpeg_obj *my_obj)
+{
+  int32_t rc = 0;
+
+  /* unload OMX engine */
+  rc = mm_jpeg_omx_unload(my_obj);
+
+  /* release jobmgr thread */
+  rc = mm_jpeg_jobmgr_thread_release(my_obj);
+
+  /* deinit ongoing job queue */
+  rc = mm_jpeg_queue_deinit(&my_obj->ongoing_job_q);
+
+  /* destroy locks */
+  pthread_mutex_destroy(&my_obj->job_lock);
+  pthread_mutex_destroy(&my_obj->omx_evt_lock);
+  pthread_cond_destroy(&my_obj->omx_evt_cond);
+
+  return rc;
+}
+
+/* Allocate a client slot and return a non-zero handle encoding its
+ * index, or 0 when MAX_JPEG_CLIENT_NUM clients are already open. */
+uint32_t mm_jpeg_new_client(mm_jpeg_obj *my_obj)
+{
+  uint32_t client_hdl = 0;
+  uint8_t idx;
+
+  if (my_obj->num_clients >= MAX_JPEG_CLIENT_NUM) {
+    CDBG_ERROR("%s: num of clients reached limit", __func__);
+    return client_hdl;
+  }
+
+  /* find the first unused slot */
+  for (idx = 0; idx < MAX_JPEG_CLIENT_NUM; idx++) {
+    if (0 == my_obj->clnt_mgr[idx].is_used) {
+      break;
+    }
+  }
+
+  if (idx < MAX_JPEG_CLIENT_NUM) {
+    /* client entry avail */
+    /* generate client handler by index */
+    client_hdl = mm_jpeg_util_generate_handler(idx);
+
+    /* update client entry */
+    my_obj->clnt_mgr[idx].is_used = 1;
+    my_obj->clnt_mgr[idx].client_handle = client_hdl;
+
+    /* increase client count */
+    my_obj->num_clients++;
+  }
+
+  return client_hdl;
+}
+
+/* Queue a new encode job for the job manager thread (async: the result
+ * arrives through the job's callback). On success *jobId receives the
+ * generated job handle and 0 is returned; on failure *jobId stays 0
+ * and a negative error is returned. */
+int32_t mm_jpeg_start_job(mm_jpeg_obj *my_obj,
+                          uint32_t client_hdl,
+                          mm_jpeg_job* job,
+                          uint32_t* jobId)
+{
+  int32_t rc = -1;
+  uint8_t clnt_idx = 0;
+  uint32_t job_id = 0;
+  mm_jpeg_job_q_node_t* node = NULL;
+
+  *jobId = 0;
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM ) {
+    CDBG_ERROR("%s: invalid client with handler (%d)", __func__, client_hdl);
+    return rc;
+  }
+
+  /* generate job handler from the client's index */
+  job_id = mm_jpeg_util_generate_handler(clnt_idx);
+
+  /* enqueue new job into todo job queue */
+  node = (mm_jpeg_job_q_node_t *)malloc(sizeof(mm_jpeg_job_q_node_t));
+  if (NULL == node) {
+    CDBG_ERROR("%s: No memory for mm_jpeg_job_q_node_t", __func__);
+    /* Bug fix: was `return -rc;`, which with rc == -1 returned +1 and
+     * broke the negative-error convention used everywhere else. */
+    return -1;
+  }
+
+  memset(node, 0, sizeof(mm_jpeg_job_q_node_t));
+  node->type = MM_JPEG_CMD_TYPE_JOB;
+  node->entry.client_hdl = client_hdl;
+  node->entry.jobId = job_id;
+  memcpy(&node->entry.job, job, sizeof(mm_jpeg_job));
+  node->entry.jpeg_obj = (void*)my_obj; /* save a ptr to jpeg_obj */
+
+  rc = mm_jpeg_queue_enq(&my_obj->job_mgr.job_queue, node);
+  if (0 == rc) {
+    /* wake the jobmgr thread and publish the job id */
+    sem_post(&my_obj->job_mgr.job_sem);
+    *jobId = job_id;
+  }
+
+  return rc;
+}
+
+/* Synchronously abort one job wherever it currently lives: the OMX
+ * ongoing queue (flush the component), the todo queue (just drop it),
+ * or the callback queue (join the notify thread). Holds job_lock for
+ * the whole search, then pokes the jobmgr thread.
+ * NOTE(review): returns -1 when the job is in the todo/cb queue or not
+ * found at all -- rc is only set by mm_jpeg_omx_abort_job. */
+int32_t mm_jpeg_abort_job(mm_jpeg_obj *my_obj,
+                          uint32_t client_hdl,
+                          uint32_t jobId)
+{
+  int32_t rc = -1;
+  uint8_t clnt_idx = 0;
+  void * node = NULL;
+  mm_jpeg_job_entry* job_entry = NULL;
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM ) {
+    CDBG_ERROR("%s: invalid client with handler (%d)", __func__, client_hdl);
+    return rc;
+  }
+
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in ongoing queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->ongoing_job_q, jobId);
+  if (NULL != node) {
+    /* find job that is OMX ongoing, ask OMX to abort the job */
+    job_entry = &(((mm_jpeg_job_q_node_t *)node)->entry);
+    rc = mm_jpeg_omx_abort_job(my_obj, job_entry);
+    free(node);
+    goto abort_done;
+  }
+
+  /* abort job if in todo queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->job_mgr.job_queue, jobId);
+  if (NULL != node) {
+    /* simply delete it */
+    free(node);
+    goto abort_done;
+  }
+
+  /* abort job if in cb queue */
+  node = mm_jpeg_queue_remove_job_by_job_id(&my_obj->cb_q, jobId);
+  if (NULL != node) {
+    /* join cb thread so the callback finishes before we free the node */
+    job_entry = &(((mm_jpeg_job_q_node_t *)node)->entry);
+    if (pthread_join(job_entry->cb_pid, NULL) != 0) {
+      CDBG("%s: pthread dead already\n", __func__);
+    }
+    free(node);
+  }
+
+abort_done:
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  /* wake up jobMgr thread to work on new job if there is any */
+  sem_post(&my_obj->job_mgr.job_sem);
+
+  return rc;
+}
+
+/* Synchronously close a client: abort every job it owns in the
+ * ongoing, todo, and callback queues (joining callback threads), then
+ * invalidate its client-manager entry. Returns 0 on success (including
+ * a client with no pending jobs), non-zero for an invalid handle or
+ * the last failing OMX abort. */
+int32_t mm_jpeg_close(mm_jpeg_obj *my_obj, uint32_t client_hdl)
+{
+  int32_t rc = -1;
+  uint8_t clnt_idx = 0;
+  void* node = NULL;
+  mm_jpeg_job_entry* job_entry = NULL;
+
+  /* check if valid client */
+  clnt_idx = mm_jpeg_util_get_index_by_handler(client_hdl);
+  if (clnt_idx >= MAX_JPEG_CLIENT_NUM ) {
+    CDBG_ERROR("%s: invalid client with handler (%d)", __func__, client_hdl);
+    return rc;
+  }
+
+  /* Bug fix: rc previously stayed -1 when the client had no pending
+   * jobs, so a perfectly clean close reported failure. The handle is
+   * valid past this point, so default to success. */
+  rc = 0;
+
+  /* abort all jobs from the client */
+  pthread_mutex_lock(&my_obj->job_lock);
+
+  /* abort job if in ongoing queue */
+  node = mm_jpeg_queue_remove_job_by_client_id(&my_obj->ongoing_job_q, client_hdl);
+  while (NULL != node) {
+    /* find job that is OMX ongoing, ask OMX to abort the job */
+    job_entry = &(((mm_jpeg_job_q_node_t *)node)->entry);
+    rc = mm_jpeg_omx_abort_job(my_obj, job_entry);
+    free(node);
+
+    /* find next job from ongoing queue that belongs to this client */
+    node = mm_jpeg_queue_remove_job_by_client_id(&my_obj->ongoing_job_q, client_hdl);
+  }
+
+  /* abort job if in todo queue */
+  node = mm_jpeg_queue_remove_job_by_client_id(&my_obj->job_mgr.job_queue, client_hdl);
+  while (NULL != node) {
+    /* simply delete the job if in todo queue */
+    free(node);
+
+    /* find next job from todo queue that belongs to this client */
+    node = mm_jpeg_queue_remove_job_by_client_id(&my_obj->job_mgr.job_queue, client_hdl);
+  }
+
+  /* abort job if in cb queue */
+  node = mm_jpeg_queue_remove_job_by_client_id(&my_obj->cb_q, client_hdl);
+  while (NULL != node) {
+    /* join cb thread so the callback finishes before the node is freed */
+    job_entry = &(((mm_jpeg_job_q_node_t *)node)->entry);
+    if (pthread_join(job_entry->cb_pid, NULL) != 0) {
+      CDBG("%s: pthread dead already\n", __func__);
+    }
+    free(node);
+
+    /* find next job from cb queue that belongs to this client */
+    node = mm_jpeg_queue_remove_job_by_client_id(&my_obj->cb_q, client_hdl);
+  }
+
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  /* invalidate client entry */
+  memset(&my_obj->clnt_mgr[clnt_idx], 0, sizeof(mm_jpeg_client_t));
+
+  return rc;
+}
+
+/* Block on the OMX event condvar until any event in evt_mask has been
+ * recorded in omx_evt_rcvd. NOTE(review): the received event is not
+ * cleared here; a stale match from a previous job would satisfy the
+ * wait immediately -- verify callers reset omx_evt_rcvd. */
+void mm_jpeg_job_wait_for_event(mm_jpeg_obj *my_obj, uint32_t evt_mask)
+{
+  pthread_mutex_lock(&my_obj->omx_evt_lock);
+  while (!(my_obj->omx_evt_rcvd.evt & evt_mask)) {
+    pthread_cond_wait(&my_obj->omx_evt_cond, &my_obj->omx_evt_lock);
+  }
+  CDBG("%s:done", __func__);
+  pthread_mutex_unlock(&my_obj->omx_evt_lock);
+}
+
+/* Block until a CMD_COMPLETE event for the given command/status pair
+ * (e.g. OMX_CommandStateSet reaching OMX_StateIdle) has been recorded
+ * by the OMX event handler. */
+void mm_jpeg_job_wait_for_cmd_complete(mm_jpeg_obj *my_obj,
+                                       int cmd,
+                                       int status)
+{
+  pthread_mutex_lock(&my_obj->omx_evt_lock);
+  while (!((my_obj->omx_evt_rcvd.evt & MM_JPEG_EVENT_MASK_CMD_COMPLETE) &&
+           (my_obj->omx_evt_rcvd.omx_value1 == cmd) &&
+           (my_obj->omx_evt_rcvd.omx_value2 == status))) {
+    pthread_cond_wait(&my_obj->omx_evt_cond, &my_obj->omx_evt_lock);
+  }
+  CDBG("%s:done", __func__);
+  pthread_mutex_unlock(&my_obj->omx_evt_lock);
+}
+
+/* OMX EmptyBufferDone callback: input buffer consumed. Nothing to do
+ * here -- input buffers are freed when the job is aborted/completed. */
+OMX_ERRORTYPE mm_jpeg_etbdone(OMX_HANDLETYPE hComponent,
+                              OMX_PTR pAppData,
+                              OMX_BUFFERHEADERTYPE* pBuffer)
+{
+  /* no process needed for etbdone, return here */
+  return 0;
+}
+
+/* OMX FillBufferDone callback: the encoder finished writing the JPEG
+ * into the sink buffer. Signals the JPEG_DONE event, dequeues the
+ * (single) ongoing job, records output size and DONE status, and spawns
+ * the notify thread to deliver the user callback. */
+OMX_ERRORTYPE mm_jpeg_ftbdone(OMX_HANDLETYPE hComponent,
+                              OMX_PTR pAppData,
+                              OMX_BUFFERHEADERTYPE* pBuffer)
+{
+  int rc = 0;
+  void* node = NULL;
+  mm_jpeg_job_entry* job_entry = NULL;
+  mm_jpeg_obj * my_obj = (mm_jpeg_obj*)pAppData;
+
+  /* Bug fix: the NULL check was inverted (NULL != my_obj), which bailed
+   * out on every valid callback and dereferenced NULL otherwise. */
+  if (NULL == my_obj) {
+    CDBG_ERROR("%s: pAppData is NULL, return here", __func__);
+    return rc;
+  }
+
+  /* signal JPEG_DONE event to any waiter (e.g. an in-flight abort) */
+  pthread_mutex_lock(&my_obj->omx_evt_lock);
+  my_obj->omx_evt_rcvd.evt = MM_JPEG_EVENT_MASK_JPEG_DONE;
+  pthread_cond_signal(&my_obj->omx_evt_cond);
+  pthread_mutex_unlock(&my_obj->omx_evt_lock);
+
+  /* If OMX can support multi encoding, it should provide a way to pass jobID.
+   * then we can find job by jobID from ongoing queue.
+   * For now, since OMX only support one job per time, we simply dequeue it. */
+  pthread_mutex_lock(&my_obj->job_lock);
+  node = mm_jpeg_queue_deq(&my_obj->ongoing_job_q);
+  if (NULL != node) {
+    job_entry = &(((mm_jpeg_job_q_node_t *)node)->entry);
+    /* find job that is OMX ongoing */
+    job_entry->jpeg_size = pBuffer->nFilledLen;
+    job_entry->job_status = JPEG_JOB_STATUS_DONE;
+    CDBG("%s:filled len = %u, status = %d",
+         __func__, job_entry->jpeg_size, job_entry->job_status);
+
+    if (NULL != job_entry->job.encode_job.jpeg_cb) {
+      /* has callback, create a thread to send CB */
+      pthread_create(&job_entry->cb_pid,
+                     NULL,
+                     mm_jpeg_notify_thread,
+                     node);
+
+    } else {
+      CDBG_ERROR("%s: no cb provided, return here", __func__);
+      free(node);
+    }
+  }
+  pthread_mutex_unlock(&my_obj->job_lock);
+
+  /* Wake up jobMgr thread to work on next job if there is any */
+  sem_post(&my_obj->job_mgr.job_sem);
+
+  return rc;
+}
+
+/* OMX component event callback.  Translates OMX events into the internal
+ * condvar event flags; for a fatal JPEG error it also fails the current
+ * ongoing job, dispatches its callback, and wakes the job manager.
+ * Returns 0. */
+OMX_ERRORTYPE mm_jpeg_handle_omx_event(OMX_HANDLETYPE hComponent,
+  OMX_PTR pAppData,
+  OMX_EVENTTYPE eEvent,
+  OMX_U32 nData1,
+  OMX_U32 nData2,
+  OMX_PTR pEventData)
+{
+  int rc = 0;
+  void* node = NULL;
+  mm_jpeg_job_entry* job_entry = NULL;
+  mm_jpeg_obj * my_obj = (mm_jpeg_obj*)pAppData;
+  /* NOTE(review): jobId is never assigned before the flag update below;
+   * OMX currently runs one job at a time, so this always targets id 0 —
+   * confirm against mm_jpeg_queue_update_flag's expectations. */
+  uint32_t jobId = 0;
+
+  /* BUG FIX: the guard was inverted (NULL != my_obj): valid callbacks were
+   * dropped and a genuinely NULL pAppData was dereferenced below. */
+  if (NULL == my_obj) {
+    CDBG_ERROR("%s: pAppData is NULL, return here", __func__);
+    return rc;
+  }
+
+  /* signal event */
+  switch (eEvent) {
+  case OMX_EVENT_JPEG_ABORT:
+    {
+      /* signal error evt */
+      pthread_mutex_lock(&my_obj->omx_evt_lock);
+      my_obj->omx_evt_rcvd.evt = MM_JPEG_EVENT_MASK_JPEG_ABORT;
+      pthread_cond_signal(&my_obj->omx_evt_cond);
+      pthread_mutex_unlock(&my_obj->omx_evt_lock);
+    }
+    break;
+  case OMX_EventError:
+    {
+      switch (nData1) {
+      case OMX_EVENT_THUMBNAIL_DROPPED:
+        {
+          /* non-fatal: mark the job so the caller knows the thumbnail
+           * was dropped from the output */
+          uint8_t thumbnail_dropped_flag = 1;
+          mm_jpeg_queue_update_flag(&my_obj->ongoing_job_q,
+            jobId,
+            thumbnail_dropped_flag);
+        }
+        break;
+      case OMX_EVENT_JPEG_ERROR:
+        {
+          /* signal error evt */
+          pthread_mutex_lock(&my_obj->omx_evt_lock);
+          my_obj->omx_evt_rcvd.evt = MM_JPEG_EVENT_MASK_JPEG_ERROR;
+          pthread_cond_signal(&my_obj->omx_evt_cond);
+          pthread_mutex_unlock(&my_obj->omx_evt_lock);
+
+          /* send CB for error case */
+          /* If OMX can support multi encoding, it should provide a way to pass jobID.
+           * then we can find job by jobID from ongoing queue.
+           * For now, since OMX only support one job per time, we simply dequeue it. */
+          pthread_mutex_lock(&my_obj->job_lock);
+          node = mm_jpeg_queue_deq(&my_obj->ongoing_job_q);
+          if (NULL != node) {
+            job_entry = &(((mm_jpeg_job_q_node_t *)node)->entry);
+
+            /* find job that is OMX ongoing */
+            job_entry->job_status = JPEG_JOB_STATUS_ERROR;
+            if (NULL != job_entry->job.encode_job.jpeg_cb) {
+              /* has callback, create a thread to send CB */
+              pthread_create(&job_entry->cb_pid,
+                NULL,
+                mm_jpeg_notify_thread,
+                node);
+            } else {
+              CDBG_ERROR("%s: no cb provided, return here", __func__);
+              free(node);
+            }
+          }
+          pthread_mutex_unlock(&my_obj->job_lock);
+
+          /* Wake up jobMgr thread to work on next job if there is any */
+          sem_post(&my_obj->job_mgr.job_sem);
+        }
+        break;
+      default:
+        break;
+      }
+    }
+    break;
+  case OMX_EventCmdComplete:
+    {
+      /* signal cmd complete evt (consumed by wait_for_cmd_complete) */
+      pthread_mutex_lock(&my_obj->omx_evt_lock);
+      my_obj->omx_evt_rcvd.evt = MM_JPEG_EVENT_MASK_CMD_COMPLETE;
+      my_obj->omx_evt_rcvd.omx_value1 = nData1;
+      my_obj->omx_evt_rcvd.omx_value2 = nData2;
+      pthread_cond_signal(&my_obj->omx_evt_cond);
+      pthread_mutex_unlock(&my_obj->omx_evt_lock);
+    }
+    break;
+  default:
+    break;
+  }
+
+  return rc;
+}
+
+/* remove the first job from the queue with matching client handle */
+/* remove the first job from the queue with matching client handle */
+/* Returns the job payload (caller takes ownership) or NULL if no job of
+ * this client is queued.  Thread-safe via queue->lock.
+ * NOTE(review): the mm_jpeg_q_node_t wrapper is unlinked but never freed
+ * here — only the payload is returned; confirm the wrapper isn't leaked. */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_client_id(mm_jpeg_queue_t* queue, uint32_t client_hdl)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data;
+
+    if (data && (data->entry.client_hdl == client_hdl)) {
+      /* first match wins: unlink and stop scanning */
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      break;
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* remove job from the queue with matching job id */
+/* remove job from the queue with matching job id */
+/* Returns the job payload (caller takes ownership) or NULL if job_id is
+ * not queued.  Thread-safe via queue->lock.
+ * NOTE(review): as with the client-id variant, the queue-node wrapper is
+ * unlinked but not freed here — verify ownership with the callers. */
+mm_jpeg_job_q_node_t* mm_jpeg_queue_remove_job_by_job_id(mm_jpeg_queue_t* queue, uint32_t job_id)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data;
+
+    if (data && (data->entry.jobId == job_id)) {
+      /* job ids are expected unique, so stop at the first match */
+      job_node = data;
+      cam_list_del_node(&node->list);
+      queue->size--;
+      break;
+    }
+    pos = pos->next;
+  }
+
+  pthread_mutex_unlock(&queue->lock);
+
+  return job_node;
+}
+
+/* update thumbnail dropped flag in job queue */
+/* update thumbnail dropped flag in job queue */
+/* Scans the queue (under queue->lock) for the entry with jobId == job_id
+ * and sets its thumbnail_dropped flag in place; the node stays queued.
+ * Returns 0 on success, -1 if the job id is not present. */
+int32_t mm_jpeg_queue_update_flag(mm_jpeg_queue_t* queue, uint32_t job_id, uint8_t flag)
+{
+  int32_t rc = 0;
+  mm_jpeg_q_node_t* node = NULL;
+  mm_jpeg_job_q_node_t* data = NULL;
+  mm_jpeg_job_q_node_t* job_node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    data = (mm_jpeg_job_q_node_t *)node->data;
+
+    if (data && (data->entry.jobId == job_id)) {
+      job_node = data;
+      break;
+    }
+    pos = pos->next;
+  }
+
+  if (job_node) {
+    /* find matching job for its job id */
+    job_node->entry.thumbnail_dropped = flag;
+  } else {
+    CDBG_ERROR("%s: No entry for jobId = %d", __func__, job_id);
+    rc = -1;
+  }
+  pthread_mutex_unlock(&queue->lock);
+
+  return rc;
+}
diff --git a/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg_interface.c b/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
new file mode 100644
index 0000000..ccf43e7
--- /dev/null
+++ b/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg_interface.c
@@ -0,0 +1,202 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <semaphore.h>
+
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg_interface.h"
+#include "mm_jpeg.h"
+
+/* serializes every interface entry point against the singleton below */
+static pthread_mutex_t g_intf_lock = PTHREAD_MUTEX_INITIALIZER;
+/* lazily created by jpeg_open(); freed when the last client closes */
+static mm_jpeg_obj* g_jpeg_obj = NULL;
+
+/* guards the monotonically increasing handler counter */
+static pthread_mutex_t g_handler_lock = PTHREAD_MUTEX_INITIALIZER;
+static uint16_t g_handler_history_count = 0; /* history count for handler */
+
+/* utility function to generate handler */
+uint32_t mm_jpeg_util_generate_handler(uint8_t index)
+{
+ uint32_t handler = 0;
+ pthread_mutex_lock(&g_handler_lock);
+ g_handler_history_count++;
+ if (0 == g_handler_history_count) {
+ g_handler_history_count++;
+ }
+ handler = g_handler_history_count;
+ handler = (handler<<8) | index;
+ pthread_mutex_unlock(&g_handler_lock);
+ return handler;
+}
+
+/* Recover the client-slot index (low byte) packed into a handler by
+ * mm_jpeg_util_generate_handler(). */
+uint8_t mm_jpeg_util_get_index_by_handler(uint32_t handler)
+{
+  return (uint8_t)(handler & 0xff);
+}
+
+/* ops-table entry: submit an encode job for client_hdl.
+ * Validates arguments, requires the singleton to exist, then forwards to
+ * mm_jpeg_start_job.  On success *jobId receives the new job's id.
+ * Returns 0 on success, -1 on bad args or unopened interface. */
+static int32_t mm_jpeg_intf_start_job(uint32_t client_hdl, mm_jpeg_job* job, uint32_t* jobId)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl ||
+    NULL == job ||
+    NULL == jobId) {
+    CDBG_ERROR("%s: invalid parameters for client_hdl, job or jobId", __func__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s: mm_jpeg is not opened yet", __func__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_start_job(g_jpeg_obj, client_hdl, job, jobId);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/* ops-table entry: abort a previously started job.
+ * Validates handles, requires the singleton, forwards to mm_jpeg_abort_job.
+ * Returns 0 on success, -1 on bad args or unopened interface. */
+static int32_t mm_jpeg_intf_abort_job(uint32_t client_hdl, uint32_t jobId)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl || 0 == jobId) {
+    CDBG_ERROR("%s: invalid client_hdl or jobId", __func__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s: mm_jpeg is not opened yet", __func__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_abort_job(g_jpeg_obj, client_hdl, jobId);
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/* ops-table entry: close one client; when the last client goes away the
+ * whole jpeg singleton is deinitialized and freed.
+ * Returns 0 on success, -1 on bad handle or unopened interface. */
+static int32_t mm_jpeg_intf_close(uint32_t client_hdl)
+{
+  int32_t rc = -1;
+
+  if (0 == client_hdl) {
+    CDBG_ERROR("%s: invalid client_hdl", __func__);
+    return rc;
+  }
+
+  pthread_mutex_lock(&g_intf_lock);
+  if (NULL == g_jpeg_obj) {
+    /* mm_jpeg obj not exists, return error */
+    CDBG_ERROR("%s: mm_jpeg is not opened yet", __func__);
+    pthread_mutex_unlock(&g_intf_lock);
+    return rc;
+  }
+
+  rc = mm_jpeg_close(g_jpeg_obj, client_hdl);
+
+  if(0 == rc) {
+    if (0 == g_jpeg_obj->num_clients) {
+      /* No client, close jpeg internally; rc reflects the deinit result */
+      rc = mm_jpeg_deinit(g_jpeg_obj);
+      free(g_jpeg_obj);
+      g_jpeg_obj = NULL;
+    }
+  }
+
+  pthread_mutex_unlock(&g_intf_lock);
+  return rc;
+}
+
+/* open jpeg client */
+uint32_t jpeg_open(mm_jpeg_ops_t *ops)
+{
+ int32_t rc = 0;
+ uint32_t clnt_hdl = 0;
+ mm_jpeg_obj* jpeg_obj = NULL;
+
+ pthread_mutex_lock(&g_intf_lock);
+ /* first time open */
+ if(NULL == g_jpeg_obj) {
+ jpeg_obj = (mm_jpeg_obj *)malloc(sizeof(mm_jpeg_obj));
+ if(NULL == jpeg_obj) {
+ CDBG_ERROR("%s: no mem", __func__);
+ pthread_mutex_unlock(&g_intf_lock);
+ return clnt_hdl;
+ }
+
+ /* initialize jpeg obj */
+ memset(jpeg_obj, 0, sizeof(mm_jpeg_obj));
+ rc = mm_jpeg_init(jpeg_obj);
+ if(0 != rc) {
+ CDBG_ERROR("%s: mm_jpeg_init err = %d", __func__, rc);
+ free(jpeg_obj);
+ pthread_mutex_unlock(&g_intf_lock);
+ return clnt_hdl;
+ }
+
+ /* remember in global variable */
+ g_jpeg_obj = jpeg_obj;
+ }
+
+ /* open new client */
+ clnt_hdl = mm_jpeg_new_client(g_jpeg_obj);
+ if (clnt_hdl > 0) {
+ /* valid client */
+ if (NULL != ops) {
+ /* fill in ops tbl if ptr not NULL */
+ ops->start_job = mm_jpeg_intf_start_job;
+ ops->abort_job = mm_jpeg_intf_abort_job;
+ //ops->abort_job_all = mm_jpeg_intf_close,
+ ops->close = mm_jpeg_intf_close;
+ }
+ } else {
+ /* failed new client */
+ CDBG_ERROR("%s: mm_jpeg_new_client failed", __func__);
+
+ if (0 == g_jpeg_obj->num_clients) {
+ /* no client, close jpeg */
+ mm_jpeg_deinit(g_jpeg_obj);
+ free(g_jpeg_obj);
+ g_jpeg_obj = NULL;
+ }
+ }
+
+ pthread_mutex_unlock(&g_intf_lock);
+ return clnt_hdl;
+}
diff --git a/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg_queue.c b/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
new file mode 100644
index 0000000..d802843
--- /dev/null
+++ b/camera/QCamera/stack/mm-jpeg-interface/src/mm_jpeg_queue.c
@@ -0,0 +1,134 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_jpeg_dbg.h"
+#include "mm_jpeg.h"
+
+/* Put a queue into its empty initial state: zero size, self-linked list
+ * head, and a default mutex.  Always returns 0. */
+int32_t mm_jpeg_queue_init(mm_jpeg_queue_t* queue)
+{
+  queue->size = 0;
+  cam_list_init(&queue->head.list);
+  pthread_mutex_init(&queue->lock, NULL);
+  return 0;
+}
+
+/* Append data to the tail of the queue inside a freshly allocated wrapper
+ * node.  Returns 0 on success, -1 on allocation failure (queue unchanged).
+ * The queue does not copy data; it stores the pointer as-is. */
+int32_t mm_jpeg_queue_enq(mm_jpeg_queue_t* queue, void* data)
+{
+  mm_jpeg_q_node_t* node =
+    (mm_jpeg_q_node_t *)malloc(sizeof(mm_jpeg_q_node_t));
+  if (NULL == node) {
+    CDBG_ERROR("%s: No memory for mm_jpeg_q_node_t", __func__);
+    return -1;
+  }
+
+  memset(node, 0, sizeof(mm_jpeg_q_node_t));
+  node->data = data;
+
+  pthread_mutex_lock(&queue->lock);
+  cam_list_add_tail_node(&node->list, &queue->head.list);
+  queue->size++;
+  pthread_mutex_unlock(&queue->lock);
+
+  return 0;
+
+}
+
+/* Pop the head element.  Returns the stored data pointer (caller takes
+ * ownership) or NULL when the queue is empty.  The wrapper node is freed
+ * here, outside the lock. */
+void* mm_jpeg_queue_deq(mm_jpeg_queue_t* queue)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  void* data = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+  /* head->next == head means the circular list is empty */
+  if (pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    cam_list_del_node(&node->list);
+    queue->size--;
+  }
+  pthread_mutex_unlock(&queue->lock);
+
+  if (NULL != node) {
+    data = node->data;
+    free(node);
+  }
+
+  return data;
+}
+
+/* Read the current element count under the queue lock. */
+uint32_t mm_jpeg_queue_get_size(mm_jpeg_queue_t* queue)
+{
+  uint32_t count = 0;
+
+  pthread_mutex_lock(&queue->lock);
+  count = queue->size;
+  pthread_mutex_unlock(&queue->lock);
+
+  return count;
+}
+
+/* Tear down a queue: drop any remaining elements, then destroy the mutex.
+ * Always returns 0.  The queue must not be used afterwards. */
+int32_t mm_jpeg_queue_deinit(mm_jpeg_queue_t* queue)
+{
+  mm_jpeg_queue_flush(queue);
+  pthread_mutex_destroy(&queue->lock);
+  return 0;
+}
+
+/* Drain the queue, freeing each payload and its wrapper node.
+ * Payloads are assumed to own no nested pointers (freed with a bare free).
+ * Always returns 0. */
+int32_t mm_jpeg_queue_flush(mm_jpeg_queue_t* queue)
+{
+  mm_jpeg_q_node_t* node = NULL;
+  struct cam_list *head = NULL;
+  struct cam_list *pos = NULL;
+
+  pthread_mutex_lock(&queue->lock);
+  head = &queue->head.list;
+  pos = head->next;
+
+  while(pos != head) {
+    node = member_of(pos, mm_jpeg_q_node_t, list);
+    /* BUG FIX: advance pos BEFORE unlinking/freeing the node.  The old
+     * code freed node and then read pos->next, which lives inside the
+     * just-freed node — a use-after-free. */
+    pos = pos->next;
+    cam_list_del_node(&node->list);
+    queue->size--;
+
+    /* for now we only assume there is no ptr inside data
+     * so we free data directly */
+    if (NULL != node->data) {
+      free(node->data);
+    }
+    free(node);
+  }
+  queue->size = 0;
+  pthread_mutex_unlock(&queue->lock);
+  return 0;
+}
diff --git a/camera/QCameraHAL.cpp b/camera/QCameraHAL.cpp
new file mode 100644
index 0000000..dea0457
--- /dev/null
+++ b/camera/QCameraHAL.cpp
@@ -0,0 +1,132 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraHAL"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+/* include QCamera Hardware Interface Header*/
+#include "QCameraHAL.h"
+
+/* number of sensors reported by the last mm_camera_query() */
+int HAL_numOfCameras = 0;
+/* per-sensor info table (NOTE(review): appears unused in this file) */
+qcamera_info_t HAL_cameraInfo[MSM_MAX_CAMERA_SENSORS];
+/* per-sensor handles; entries point into the array returned by mm_camera_query() */
+mm_camera_t * HAL_camerahandle[MSM_MAX_CAMERA_SENSORS];
+/* mode of the most recently opened camera (see HAL_isIn3DMode) */
+int HAL_currentCameraMode;
+
+namespace android {
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+/* Query the camera stack for the number of sensors and cache one handle
+ * per sensor in HAL_camerahandle[].  Re-queries on every call.  Returns
+ * the count (0 on query failure). */
+extern "C" int HAL_getNumberOfCameras()
+{
+  /* try to query every time we get the call!*/
+  uint8_t num_camera = 0;
+  mm_camera_t * handle_base = 0;
+  ALOGV("%s: E", __func__);
+
+  handle_base= mm_camera_query(&num_camera);
+
+  if (!handle_base) {
+    HAL_numOfCameras = 0;
+  }
+  else
+  {
+    qcamera_info_t* p_camera_info = 0;
+    HAL_numOfCameras=num_camera;
+
+    ALOGI("Handle base =0x%p",handle_base);
+    ALOGI("getCameraInfo: numOfCameras = %d", HAL_numOfCameras);
+    /* handles are laid out contiguously after handle_base, one per sensor */
+    for(int i = 0; i < HAL_numOfCameras; i++) {
+      ALOGI("Handle [%d]=0x%p",i,handle_base+i);
+      HAL_camerahandle[i]=handle_base + i;
+      /* address-of a struct member is always non-NULL; this check is
+       * effectively defensive logging only */
+      p_camera_info = &(HAL_camerahandle[i]->camera_info);
+      if (p_camera_info) {
+        ALOGI("Camera sensor %d info:", i);
+        ALOGI("camera_id: %d", p_camera_info->camera_id);
+        ALOGI("modes_supported: %x", p_camera_info->modes_supported);
+        ALOGI("position: %d", p_camera_info->position);
+        ALOGI("sensor_mount_angle: %d", p_camera_info->sensor_mount_angle);
+      }
+    }
+  }
+
+  ALOGV("%s: X", __func__);
+
+  return HAL_numOfCameras;
+}
+
+/* Nonzero when the most recently opened camera mode is 3D. */
+extern "C" int HAL_isIn3DMode()
+{
+  return (CAMERA_MODE_3D == HAL_currentCameraMode) ? 1 : 0;
+}
+
+/* Fill cameraInfo (facing, orientation) for cameraId from the cached
+ * handle table.  Silently returns on any invalid argument. */
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo)
+{
+  mm_camera_t *mm_camer_obj = 0;
+  ALOGV("%s: E", __func__);
+
+  /* BUG FIX: the old bound test (HAL_numOfCameras < cameraId) admitted
+   * cameraId == HAL_numOfCameras and negative ids, indexing
+   * HAL_camerahandle[] out of range. */
+  if (!HAL_numOfCameras || cameraId < 0 ||
+      cameraId >= HAL_numOfCameras || !cameraInfo)
+    return;
+  else
+    mm_camer_obj = HAL_camerahandle[cameraId];
+
+  if (!mm_camer_obj)
+    return;
+  else {
+    cameraInfo->facing =
+      (FRONT_CAMERA == mm_camer_obj->camera_info.position)?
+      CAMERA_FACING_FRONT : CAMERA_FACING_BACK;
+
+    cameraInfo->orientation = mm_camer_obj->camera_info.sensor_mount_angle;
+#if 0
+    // TODO: fix me
+    /* We always supprot ZSL in our stack*/
+    cameraInfo->mode = CAMERA_SUPPORT_MODE_ZSL;
+    if (mm_camer_obj->camera_info.modes_supported & CAMERA_MODE_2D) {
+      cameraInfo->mode |= CAMERA_SUPPORT_MODE_2D;
+    }
+    if (mm_camer_obj->camera_info.modes_supported & CAMERA_MODE_3D) {
+      cameraInfo->mode |= CAMERA_SUPPORT_MODE_3D;
+    }
+#endif
+  }
+  ALOGV("%s: X", __func__);
+  return;
+}
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" void * HAL_openCameraHardware(int cameraId, int mode)
+{
+ ALOGV("%s: E", __func__);
+ if (!HAL_numOfCameras || HAL_numOfCameras < cameraId ||cameraId < 0) {
+ return NULL;
+ }
+ return QCameraHAL_openCameraHardware(cameraId, mode);
+}
+
+
+}; // namespace android
diff --git a/camera/QCameraHAL.h b/camera/QCameraHAL.h
new file mode 100644
index 0000000..4450e4f
--- /dev/null
+++ b/camera/QCameraHAL.h
@@ -0,0 +1,37 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_HAL_H
+#define ANDROID_HARDWARE_QCAMERA_HAL_H
+
+
+#include "QCameraHWI.h"
+
+extern "C" {
+#include <mm_camera_interface2.h>
+}
+namespace android {
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" void *
+ QCameraHAL_openCameraHardware(int cameraId, int mode);
+extern "C" int HAL_getNumberOfCameras();
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo);
+
+}; // namespace android
+
+#endif
+
diff --git a/camera/QCameraHWI.cpp b/camera/QCameraHWI.cpp
new file mode 100755
index 0000000..5b7b1ce
--- /dev/null
+++ b/camera/QCameraHWI.cpp
@@ -0,0 +1,2769 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+#define ALOG_NIDEBUG 0
+
+#define LOG_TAG "QCameraHWI"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <string.h>
+#include <dlfcn.h>
+
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+
+/* QCameraHardwareInterface class implementation goes here*/
+/* following code implement the contol logic of this class*/
+
+namespace android {
+/* Trampoline registered with the mm-camera stack: forwards each event to
+ * the QCameraHardwareInterface instance stashed in user_data. */
+static void HAL_event_cb(mm_camera_event_t *evt, void *user_data)
+{
+  QCameraHardwareInterface *obj = (QCameraHardwareInterface *)user_data;
+  if (obj) {
+    obj->processEvent(evt);
+  } else {
+    ALOGE("%s: NULL user_data", __func__);
+  }
+}
+
+/* Create and initialize the record (video) stream object for this camera.
+ * Returns MM_CAMERA_OK on success, BAD_VALUE on create/init failure. */
+int32_t QCameraHardwareInterface::createRecord()
+{
+    int32_t ret = MM_CAMERA_OK;
+    ALOGV("%s : BEGIN",__func__);
+
+    /*
+     * Creating Instance of record stream.
+     */
+    ALOGV("Mymode Record = %d",myMode);
+    mStreamRecord = QCameraStream_record::createInstance(mCameraId,
+                                                         myMode);
+
+    if (!mStreamRecord) {
+        ALOGE("%s: error - can't creat record stream!", __func__);
+        return BAD_VALUE;
+    }
+
+    /* Store HAL object in record stream Object */
+    mStreamRecord->setHALCameraControl(this);
+
+    /*Init Channel */
+    ret = mStreamRecord->init();
+    if (MM_CAMERA_OK != ret){
+        ALOGE("%s: error - can't init Record channel!", __func__);
+        return BAD_VALUE;
+    }
+    ALOGV("%s : END",__func__);
+    return ret;
+}
+
+/* Create and initialize the snapshot stream object for this camera.
+ * Returns MM_CAMERA_OK on success, BAD_VALUE on create/init failure. */
+int32_t QCameraHardwareInterface::createSnapshot()
+{
+    int32_t ret = MM_CAMERA_OK;
+    ALOGV("%s : BEGIN",__func__);
+
+    /*
+     * Creating Instance of Snapshot stream.
+     */
+    ALOGV("Mymode Snap = %d",myMode);
+    mStreamSnap = QCameraStream_Snapshot::createInstance(mCameraId,
+                                                         myMode);
+    if (!mStreamSnap) {
+        ALOGE("%s: error - can't creat snapshot stream!", __func__);
+        return BAD_VALUE;
+    }
+
+    /* Store HAL object in Snapshot stream Object */
+    mStreamSnap->setHALCameraControl(this);
+
+    /*Init Channel */
+    ret = mStreamSnap->init();
+    if (MM_CAMERA_OK != ret){
+        ALOGE("%s: error - can't init Snapshot channel!", __func__);
+        return BAD_VALUE;
+    }
+    ALOGV("%s : END",__func__);
+    return ret;
+}
+
+/* Create and initialize the preview (display) stream object.
+ * Returns MM_CAMERA_OK on success, BAD_VALUE on create/init failure. */
+int32_t QCameraHardwareInterface::createPreview()
+{
+    int32_t ret = MM_CAMERA_OK;
+    ALOGV("%s : BEGIN",__func__);
+
+    ALOGV("Mymode Preview = %d",myMode);
+    mStreamDisplay = QCameraStream_preview::createInstance(mCameraId,
+                                                           myMode);
+    if (!mStreamDisplay) {
+        ALOGE("%s: error - can't creat preview stream!", __func__);
+        return BAD_VALUE;
+    }
+
+    mStreamDisplay->setHALCameraControl(this);
+
+    /*now init all the buffers and send to steam object*/
+    ret = mStreamDisplay->init();
+    if (MM_CAMERA_OK != ret){
+        ALOGE("%s: error - can't init Preview channel!", __func__);
+        return BAD_VALUE;
+    }
+    ALOGV("%s : END",__func__);
+    return ret;
+}
+
+/* constructor */
+/* Opens the camera stack for cameraId, registers event callbacks, loads
+ * the size tables, creates the preview/record/snapshot stream objects and
+ * optionally dlopens the morpho noise-reduction library.  On any failure
+ * it returns early, leaving mCameraState != CAMERA_STATE_READY (checked by
+ * isCameraReady()). */
+QCameraHardwareInterface::
+QCameraHardwareInterface(int cameraId, int mode)
+                  : mCameraId(cameraId),
+                    mParameters(),
+                    mMsgEnabled(0),
+                    mNotifyCb(0),
+                    mDataCb(0),
+                    mDataCbTimestamp(0),
+                    mCallbackCookie(0),
+                    //mPreviewHeap(0),
+                    mStreamDisplay (NULL), mStreamRecord(NULL), mStreamSnap(NULL),
+                    mFps(0),
+                    mDebugFps(0),
+                    mMaxZoom(0),
+                    mCurrentZoom(0),
+                    mSupportedPictureSizesCount(15),
+                    mDumpFrmCnt(0), mDumpSkipCnt(0),
+                    mPictureSizeCount(15),
+                    mPreviewSizeCount(13),
+                    mVideoSizeCount(0),
+                    mAutoFocusRunning(false),
+                    mNeedToUnlockCaf(false),
+                    mHasAutoFocusSupport(false),
+                    mInitialized(false),
+                    mIs3DModeOn(0),
+                    mSmoothZoomRunning(false),
+                    mParamStringInitialized(false),
+                    mFaceDetectOn(0),
+                    mDisEnabled(0),
+                    mZoomSupported(false),
+                    mFullLiveshotEnabled(false),
+                    mRecordingHint(false),
+                    mAppRecordingHint(false),
+                    mStatsOn(0), mCurrentHisto(-1), mSendData(false), mStatHeap(NULL),
+                    mZslLookBackMode(0),
+                    mZslLookBackValue(0),
+                    mZslEmptyQueueFlag(false),
+                    mPictureSizes(NULL),
+                    mVideoSizes(NULL),
+                    mCameraState(CAMERA_STATE_UNINITED),
+                    mPostPreviewHeap(NULL),
+                    mHdrMode(HDR_BRACKETING_OFF),
+                    mStreamLiveSnap(NULL),
+                    mExifTableNumEntries(0),
+                    mDenoiseValue(0),
+                    mSnapshotFormat(0),
+                    mStartRecording(0),
+                    mZslInterval(1),
+                    mNoDisplayMode(0),
+                    mBrightness(0),
+                    mContrast(0),
+                    mEffects(0),
+                    mBestShotMode(0),
+                    mHJR(0),
+                    mSkinToneEnhancement(0),
+                    mRotation(0),
+                    mFocusMode(AF_MODE_MAX),
+                    mPreviewFormat(CAMERA_YUV_420_NV21),
+                    mRestartPreview(false),
+                    mReleasedRecordingFrame(false),
+                    mStateLiveshot(false),
+                    isCameraOpen(false),
+                    mPauseFramedispatch(false)
+{
+    ALOGV("QCameraHardwareInterface: E");
+    int32_t result = MM_CAMERA_E_GENERAL;
+    char value[PROPERTY_VALUE_MAX];
+
+    pthread_mutex_init(&mAsyncCmdMutex, NULL);
+    pthread_cond_init(&mAsyncCmdWait, NULL);
+    mFlashCond = false;
+
+    /* debug/tuning knobs come from system properties */
+    property_get("persist.debug.sf.showfps", value, "0");
+    mDebugFps = atoi(value);
+    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+    mPreviewWindow = NULL;
+    property_get("camera.hal.fps", value, "0");
+    mFps = atoi(value);
+
+    ALOGV("Init mPreviewState = %d", mPreviewState);
+
+    property_get("persist.camera.hal.multitouchaf", value, "0");
+    mMultiTouch = atoi(value);
+
+    property_get("persist.camera.full.liveshot", value, "0");
+    mFullLiveshotEnabled = atoi(value);
+
+    property_get("persist.camera.hal.dis", value, "0");
+    mDisEnabled = atoi(value);
+
+    /* Open camera stack! */
+    result=cam_ops_open(mCameraId, MM_CAMERA_OP_MODE_NOTUSED);
+    if (result == MM_CAMERA_OK) {
+      int i;
+      mm_camera_event_type_t evt;
+      /* subscribe to every event type the lower layer supports */
+      for (i = 0; i < MM_CAMERA_EVT_TYPE_MAX; i++) {
+        evt = (mm_camera_event_type_t) i;
+        if (cam_evt_is_event_supported(mCameraId, evt)){
+            cam_evt_register_event_notify(mCameraId,
+              HAL_event_cb, (void *)this, evt);
+        }
+      }
+    }
+    ALOGV("Cam open returned %d",result);
+    if(MM_CAMERA_OK != result) {
+        ALOGE("startCamera: cam_ops_open failed: id = %d", mCameraId);
+        return;
+    }
+
+    loadTables();
+    /* Setup Picture Size and Preview size tables */
+    setPictureSizeTable();
+    ALOGV("%s: Picture table size: %d", __func__, mPictureSizeCount);
+    ALOGV("%s: Picture table: ", __func__);
+    for(unsigned int i=0; i < mPictureSizeCount;i++) {
+      ALOGV(" %d  %d", mPictureSizes[i].width, mPictureSizes[i].height);
+    }
+
+    setPreviewSizeTable();
+    ALOGV("%s: Preview table size: %d", __func__, mPreviewSizeCount);
+    ALOGV("%s: Preview table: ", __func__);
+    for(unsigned int i=0; i < mPreviewSizeCount;i++) {
+      ALOGV(" %d  %d", mPreviewSizes[i].width, mPreviewSizes[i].height);
+    }
+
+    setVideoSizeTable();
+    ALOGV("%s: Video table size: %d", __func__, mVideoSizeCount);
+    ALOGV("%s: Video table: ", __func__);
+    for(unsigned int i=0; i < mVideoSizeCount;i++) {
+      ALOGV(" %d  %d", mVideoSizes[i].width, mVideoSizes[i].height);
+    }
+
+    isCameraOpen = true;
+    /* set my mode - update myMode member variable due to difference in
+     enum definition between upper and lower layer*/
+    setMyMode(mode);
+    initDefaultParameters();
+
+    //Create Stream Objects
+    //Preview
+    result = createPreview();
+    if(result != MM_CAMERA_OK) {
+        ALOGE("%s X: Failed to create Preview Object",__func__);
+        return;
+    }
+
+    //Record
+    result = createRecord();
+    if(result != MM_CAMERA_OK) {
+        ALOGE("%s X: Failed to create Record Object",__func__);
+        return;
+    }
+
+    //Snapshot
+    result = createSnapshot();
+    if(result != MM_CAMERA_OK) {
+        /* BUG FIX: this branch used to log "Failed to create Record
+         * Object", masking which stream actually failed. */
+        ALOGE("%s X: Failed to create Snapshot Object",__func__);
+        return;
+    }
+    mCameraState = CAMERA_STATE_READY;
+    /* noise reduction is optional: absence of the library is tolerated */
+    libdnr = dlopen("libmorpho_noise_reduction.so", RTLD_NOW);
+    if (libdnr) {
+        /* BUG FIX: the old format string had two %p conversions but only
+         * one argument — undefined behavior reading a missing vararg. */
+        ALOGV("Open MM camera DL libmorpho_noise_reduction loaded at %p", libdnr);
+        *(void **)&LINK_morpho_DNR_ProcessFrame = dlsym(libdnr, "LINK_mm_camera_morpho_noise_reduction");
+    }
+    else
+        ALOGE("failed to open libmorpho_noise_reduction");
+
+    ALOGV("QCameraHardwareInterface: X");
+}
+
+/* Destructor: stops whatever is running for the current preview state,
+ * frees the size tables and stat heap, closes the camera stack, deletes
+ * the stream instances, and unloads the optional noise-reduction lib. */
+QCameraHardwareInterface::~QCameraHardwareInterface()
+{
+    ALOGV("~QCameraHardwareInterface: E");
+    int result;
+
+    /* bring the HAL to a stopped state before tearing anything down */
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+        break;
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        stopPreview();
+        break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        stopRecordingInternal();
+        stopPreview();
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+        cancelPictureInternal();
+        break;
+    default:
+        break;
+    }
+    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+
+    if (isCameraOpen) {
+        freePictureTable();
+        freeVideoSizeTable();
+        if(mStatHeap != NULL) {
+          mStatHeap.clear( );
+          mStatHeap = NULL;
+        }
+    }
+    /* Join the threads, complete operations and then delete
+       the instances. */
+    /* NOTE(review): cam_ops_close() runs before the stream instances are
+     * deleted — confirm the streams don't touch the stack during delete. */
+    cam_ops_close(mCameraId);
+    if(mStreamDisplay){
+        QCameraStream_preview::deleteInstance (mStreamDisplay);
+        mStreamDisplay = NULL;
+    }
+    if(mStreamRecord) {
+        QCameraStream_record::deleteInstance (mStreamRecord);
+        mStreamRecord = NULL;
+    }
+    if(mStreamSnap) {
+        QCameraStream_Snapshot::deleteInstance (mStreamSnap);
+        mStreamSnap = NULL;
+    }
+    if (libdnr != NULL) {
+         dlclose(libdnr);
+         libdnr = NULL;
+         ALOGV("closed libmorpho_noise_reduction.so");
+    }
+
+    if (mStreamLiveSnap){
+        QCameraStream_Snapshot::deleteInstance (mStreamLiveSnap);
+        mStreamLiveSnap = NULL;
+    }
+
+    pthread_mutex_destroy(&mAsyncCmdMutex);
+    pthread_cond_destroy(&mAsyncCmdWait);
+    isCameraOpen = false;
+
+    ALOGV("~QCameraHardwareInterface: X");
+}
+
+/* True only if the constructor completed all setup (state READY). */
+bool QCameraHardwareInterface::isCameraReady()
+{
+    ALOGV("isCameraReady mCameraState %d", mCameraState);
+    return (mCameraState == CAMERA_STATE_READY);
+}
+
+/* Stop any in-flight preview/recording/snapshot and return the HAL to the
+ * PREVIEW_STOPPED state.  Does not close the camera stack (destructor does). */
+void QCameraHardwareInterface::release()
+{
+    ALOGV("release: E");
+    Mutex::Autolock l(&mLock);
+
+    switch(mPreviewState) {
+    case QCAMERA_HAL_PREVIEW_STOPPED:
+        break;
+    case QCAMERA_HAL_PREVIEW_START:
+        break;
+    case QCAMERA_HAL_PREVIEW_STARTED:
+        stopPreviewInternal();
+        break;
+    case QCAMERA_HAL_RECORDING_STARTED:
+        stopRecordingInternal();
+        stopPreviewInternal();
+        break;
+    case QCAMERA_HAL_TAKE_PICTURE:
+        cancelPictureInternal();
+        break;
+    default:
+        break;
+    }
+#if 0
+    if (isRecordingRunning()) {
+        stopRecordingInternal();
+        ALOGI("release: stopRecordingInternal done.");
+    }
+    if (isPreviewRunning()) {
+        stopPreview(); //stopPreviewInternal();
+        ALOGI("release: stopPreviewInternal done.");
+    }
+    if (isSnapshotRunning()) {
+        cancelPictureInternal();
+        ALOGI("release: cancelPictureInternal done.");
+    }
+    if (mCameraState == CAMERA_STATE_ERROR) {
+        //TBD: If Error occurs then tear down
+        ALOGI("release: Tear down.");
+    }
+#endif
+    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+    ALOGV("release: X");
+}
+
+// Stores the framework-supplied callback entry points (notify, data,
+// timestamped data, memory allocator) and the opaque cookie passed back
+// with every callback. Guarded by mLock.
+void QCameraHardwareInterface::setCallbacks(
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ ALOGV("setCallbacks: E");
+ Mutex::Autolock lock(mLock);
+ mNotifyCb = notify_cb;
+ mDataCb = data_cb;
+ mDataCbTimestamp = data_cb_timestamp;
+ mGetMemory = get_memory;
+ mCallbackCookie = user;
+ ALOGV("setCallbacks: X");
+}
+
+// Sets the given CAMERA_MSG_* bits in the enabled-message mask.
+void QCameraHardwareInterface::enableMsgType(int32_t msgType)
+{
+ ALOGV("enableMsgType: E, msgType =0x%x", msgType);
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled |= msgType;
+ ALOGV("enableMsgType: X, msgType =0x%x, mMsgEnabled=0x%x", msgType, mMsgEnabled);
+}
+
+// Clears the given CAMERA_MSG_* bits from the enabled-message mask.
+void QCameraHardwareInterface::disableMsgType(int32_t msgType)
+{
+ ALOGV("disableMsgType: E");
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled &= ~msgType;
+ ALOGV("disableMsgType: X, msgType =0x%x, mMsgEnabled=0x%x", msgType, mMsgEnabled);
+}
+
+// Returns the subset of 'msgType' bits currently enabled in mMsgEnabled
+// (non-zero iff at least one requested message type is enabled).
+int QCameraHardwareInterface::msgTypeEnabled(int32_t msgType)
+{
+ ALOGV("msgTypeEnabled: E");
+ Mutex::Autolock lock(mLock);
+ // Fix: the original placed ALOGV("msgTypeEnabled: X") after the return
+ // statement, making it unreachable dead code; capture the result and
+ // log the exit before returning.
+ int enabled = (mMsgEnabled & msgType);
+ ALOGV("msgTypeEnabled: X");
+ return enabled;
+}
+// Disabled legacy dump(fd, args) implementation; the active entry point is
+// the dump(int fd) stub below. Kept compiled-out for reference.
+#if 0
+status_t QCameraHardwareInterface::dump(int fd, const Vector<String16>& args) const
+{
+ ALOGI("dump: E");
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ String8 result;
+ AutoMutex lock(&mLock);
+ write(fd, result.string(), result.size());
+ ALOGI("dump: E");
+ return NO_ERROR;
+}
+#endif
+
+// Debug-dump hook required by the camera HAL interface; intentionally
+// unimplemented — always logs and returns -1.
+int QCameraHardwareInterface::dump(int fd)
+{
+ ALOGE("%s: not supported yet", __func__);
+ return -1;
+}
+
+// HAL sendCommand entry point. Only face-detection start/stop is currently
+// honored; histogram, meta-data and smooth-zoom handling are compiled out
+// (#if 0). Unknown commands return NO_ERROR without action.
+status_t QCameraHardwareInterface::sendCommand(int32_t command, int32_t arg1,
+ int32_t arg2)
+{
+ ALOGV("sendCommand: E");
+ status_t rc = NO_ERROR;
+ Mutex::Autolock l(&mLock);
+
+ switch (command) {
+ case CAMERA_CMD_START_FACE_DETECTION:
+ // Report success even when unsupported so the framework does not
+ // treat a missing feature as a fatal error.
+ if(supportsFaceDetection() == false){
+ ALOGE("Face detection support is not available");
+ return NO_ERROR;
+ }
+ setFaceDetection("on");
+ return runFaceDetection();
+ case CAMERA_CMD_STOP_FACE_DETECTION:
+ if(supportsFaceDetection() == false){
+ ALOGE("Face detection support is not available");
+ return NO_ERROR;
+ }
+ setFaceDetection("off");
+ return runFaceDetection();
+
+ #if 0
+ case CAMERA_CMD_HISTOGRAM_ON:
+ ALOGE("histogram set to on");
+ rc = setHistogram(1);
+ break;
+ case CAMERA_CMD_HISTOGRAM_OFF:
+ ALOGE("histogram set to off");
+ rc = setHistogram(0);
+ break;
+ case CAMERA_CMD_HISTOGRAM_SEND_DATA:
+ ALOGE("histogram send data");
+ mSendData = true;
+ rc = NO_ERROR;
+ break;
+ case CAMERA_CMD_SEND_META_DATA:
+ mMetaDataWaitLock.lock();
+ if(mFaceDetectOn == true) {
+ mSendMetaData = true;
+ }
+ mMetaDataWaitLock.unlock();
+ return NO_ERROR;
+#endif
+#if 0 /* To Do: will enable it later */
+ case CAMERA_CMD_START_SMOOTH_ZOOM :
+ ALOGV("HAL sendcmd start smooth zoom %d %d", arg1 , arg2);
+ /*TO DO: get MaxZoom from parameter*/
+ int MaxZoom = 100;
+
+ switch(mCameraState ) {
+ case CAMERA_STATE_PREVIEW:
+ case CAMERA_STATE_RECORD_CMD_SENT:
+ case CAMERA_STATE_RECORD:
+ mTargetSmoothZoom = arg1;
+ mCurrentZoom = mParameters.getInt("zoom");
+ mSmoothZoomStep = (mCurrentZoom > mTargetSmoothZoom)? -1: 1;
+ if(mCurrentZoom == mTargetSmoothZoom) {
+ ALOGV("Smoothzoom target zoom value is same as "
+ "current zoom value, return...");
+ mNotifyCallback(CAMERA_MSG_ZOOM,
+ mCurrentZoom, 1, mCallbackCookie);
+ } else if(mCurrentZoom < 0 || mCurrentZoom > MaxZoom ||
+ mTargetSmoothZoom < 0 || mTargetSmoothZoom > MaxZoom) {
+ ALOGE(" ERROR : beyond supported zoom values, break..");
+ mNotifyCallback(CAMERA_MSG_ZOOM,
+ mCurrentZoom, 0, mCallbackCookie);
+ } else {
+ mSmoothZoomRunning = true;
+ mCurrentZoom += mSmoothZoomStep;
+ if ((mSmoothZoomStep < 0 && mCurrentZoom < mTargetSmoothZoom)||
+ (mSmoothZoomStep > 0 && mCurrentZoom > mTargetSmoothZoom )) {
+ mCurrentZoom = mTargetSmoothZoom;
+ }
+ mParameters.set("zoom", mCurrentZoom);
+ setZoom(mParameters);
+ }
+ break;
+ default:
+ ALOGV(" No preview, no smoothzoom ");
+ break;
+ }
+ rc = NO_ERROR;
+ break;
+
+ case CAMERA_CMD_STOP_SMOOTH_ZOOM:
+ if(mSmoothZoomRunning) {
+ mSmoothZoomRunning = false;
+ /*To Do: send cmd to stop zooming*/
+ }
+ ALOGV("HAL sendcmd stop smooth zoom");
+ rc = NO_ERROR;
+ break;
+#endif
+ default:
+ break;
+ }
+ ALOGV("sendCommand: X");
+ return rc;
+}
+
+// Sets the camera operating mode. The 3D and ZSL branches are deliberately
+// commented out, so regardless of the requested 'mode' this always selects
+// 2D, non-ZSL operation; the passed-in mode is only logged.
+void QCameraHardwareInterface::setMyMode(int mode)
+{
+ ALOGV("setMyMode: E");
+ //if (mode & CAMERA_SUPPORT_MODE_3D) {
+ // myMode = CAMERA_MODE_3D;
+ //}else {
+ /* default mode is 2D */
+ myMode = CAMERA_MODE_2D;
+ //}
+
+ //if (mode & CAMERA_SUPPORT_MODE_ZSL) {
+ // myMode = (camera_mode_t)(myMode |CAMERA_ZSL_MODE);
+ //}else {
+ myMode = (camera_mode_t) (myMode | CAMERA_NONZSL_MODE);
+ //}
+ ALOGV("setMyMode: Set mode to %d (passed mode: %d)", myMode, mode);
+ ALOGV("setMyMode: X");
+}
+/* static factory function */
+// Constructs a QCameraHardwareInterface and verifies it reached
+// CAMERA_STATE_READY; on failure the partially-constructed object is
+// deleted and NULL is returned.
+QCameraHardwareInterface *QCameraHardwareInterface::createInstance(int cameraId, int mode)
+{
+ ALOGV("createInstance: E");
+ QCameraHardwareInterface *cam = new QCameraHardwareInterface(cameraId, mode);
+ // NOTE(review): plain 'new' throws on failure rather than returning
+ // NULL in standard C++, so this check likely only matters if the build
+ // uses a non-throwing allocator — confirm before relying on it.
+ if (cam ) {
+ if (cam->mCameraState != CAMERA_STATE_READY) {
+ ALOGE("createInstance: Failed");
+ delete cam;
+ cam = NULL;
+ }
+ }
+
+ if (cam) {
+ //sp<CameraHardwareInterface> hardware(cam);
+ ALOGV("createInstance: X");
+ return cam;
+ } else {
+ return NULL;
+ }
+}
+/* external plug in function */
+// C-linkage entry point used by the camera service to open this HAL;
+// thin wrapper around createInstance(). Returns NULL on failure.
+extern "C" void *
+QCameraHAL_openCameraHardware(int cameraId, int mode)
+{
+ ALOGV("QCameraHAL_openCameraHardware: E");
+ return (void *) QCameraHardwareInterface::createInstance(cameraId, mode);
+}
+
+// Returns true when the camera state machine is in any preview, record or
+// ZSL state, including the transitional *_START_CMD_SENT states.
+bool QCameraHardwareInterface::isPreviewRunning() {
+ ALOGV("isPreviewRunning: E");
+ ALOGV("isPreviewRunning: camera state:%d", mCameraState);
+
+ // Fix: the original returned 'true' directly from inside the if,
+ // skipping the exit log and leaving the 'ret' local as dead code;
+ // compute the result once and use a single return path.
+ bool ret = (mCameraState == CAMERA_STATE_PREVIEW) ||
+ (mCameraState == CAMERA_STATE_PREVIEW_START_CMD_SENT) ||
+ (mCameraState == CAMERA_STATE_RECORD) ||
+ (mCameraState == CAMERA_STATE_RECORD_START_CMD_SENT) ||
+ (mCameraState == CAMERA_STATE_ZSL) ||
+ (mCameraState == CAMERA_STATE_ZSL_START_CMD_SENT);
+ ALOGV("isPreviewRunning: X");
+ return ret;
+}
+
+// Returns true iff the HAL preview state machine says recording is active.
+// (The older mCameraState-based check is kept commented out for reference.)
+bool QCameraHardwareInterface::isRecordingRunning() {
+ ALOGV("isRecordingRunning: E");
+ bool ret = false;
+ if(QCAMERA_HAL_RECORDING_STARTED == mPreviewState)
+ ret = true;
+ //if((mCameraState == CAMERA_STATE_RECORD) ||
+ // (mCameraState == CAMERA_STATE_RECORD_START_CMD_SENT)) {
+ // return true;
+ //}
+ ALOGV("isRecordingRunning: X");
+ return ret;
+}
+
+// Returns true iff a take-picture operation is in progress, judged solely
+// by the mPreviewState machine (legacy mCameraState check is disabled).
+bool QCameraHardwareInterface::isSnapshotRunning() {
+ ALOGV("isSnapshotRunning: E");
+ bool ret = false;
+ //if((mCameraState == CAMERA_STATE_SNAP_CMD_ACKED) ||
+ // (mCameraState == CAMERA_STATE_SNAP_START_CMD_SENT)) {
+ // return true;
+ //}
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ case QCAMERA_HAL_RECORDING_STARTED:
+ default:
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ ret = true;
+ break;
+ }
+ ALOGV("isSnapshotRunning: X");
+ return ret;
+}
+
+// Returns true iff the ZSL bit is set in the current operating mode.
+bool QCameraHardwareInterface::isZSLMode() {
+ return (myMode & CAMERA_ZSL_MODE);
+}
+
+// Accessor for the cached HDR mode setting.
+int QCameraHardwareInterface::getHDRMode() {
+ return mHdrMode;
+}
+
+// Debug helper: counts preview frames and logs the measured FPS roughly
+// every 250 ms. Uses function-static state, so it aggregates across all
+// instances and is not thread-safe — acceptable for a debug-only counter.
+void QCameraHardwareInterface::debugShowPreviewFPS() const
+{
+ static int mFrameCount;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ if (diff > ms2ns(250)) {
+ // Frames since the last sample divided by elapsed time, in frames/sec.
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ ALOGV("Preview Frames Per Second: %.4f", mFps);
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ }
+}
+
+// Maps preview-channel streaming on/off events onto mCameraState
+// (ZSL or PREVIEW when streaming starts, READY when it stops).
+void QCameraHardwareInterface::
+processPreviewChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+ ALOGV("processPreviewChannelEvent: E");
+ switch(channelEvent) {
+ case MM_CAMERA_CH_EVT_STREAMING_ON:
+ mCameraState =
+ isZSLMode() ? CAMERA_STATE_ZSL : CAMERA_STATE_PREVIEW;
+ break;
+ case MM_CAMERA_CH_EVT_STREAMING_OFF:
+ mCameraState = CAMERA_STATE_READY;
+ break;
+ case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+ break;
+ default:
+ break;
+ }
+ ALOGV("processPreviewChannelEvent: X");
+ return;
+}
+
+// Maps video-channel streaming on/off events onto mCameraState
+// (RECORD while streaming, back to PREVIEW when it stops).
+void QCameraHardwareInterface::processRecordChannelEvent(
+ mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+ ALOGV("processRecordChannelEvent: E");
+ switch(channelEvent) {
+ case MM_CAMERA_CH_EVT_STREAMING_ON:
+ mCameraState = CAMERA_STATE_RECORD;
+ break;
+ case MM_CAMERA_CH_EVT_STREAMING_OFF:
+ mCameraState = CAMERA_STATE_PREVIEW;
+ break;
+ case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+ break;
+ default:
+ break;
+ }
+ ALOGV("processRecordChannelEvent: X");
+ return;
+}
+
+// Handles snapshot-channel events. State transitions are suppressed while
+// a liveshot is in progress (mStateLiveshot); in ZSL burst mode a
+// DATA_REQUEST_MORE event triggers capture of the remaining frames.
+void QCameraHardwareInterface::
+processSnapshotChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *app_cb) {
+ ALOGV("processSnapshotChannelEvent: E evt=%d state=%d", channelEvent,
+ mCameraState);
+ switch(channelEvent) {
+ case MM_CAMERA_CH_EVT_STREAMING_ON:
+ if (!mStateLiveshot) {
+ mCameraState =
+ isZSLMode() ? CAMERA_STATE_ZSL : CAMERA_STATE_SNAP_CMD_ACKED;
+ }
+ break;
+ case MM_CAMERA_CH_EVT_STREAMING_OFF:
+ if (!mStateLiveshot) {
+ mCameraState = CAMERA_STATE_READY;
+ }
+ break;
+ case MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE:
+ break;
+ case MM_CAMERA_CH_EVT_DATA_REQUEST_MORE:
+ if (isZSLMode()) {
+ /* ZSL Mode: In ZSL Burst Mode, users may request for number of
+ snapshots larger than internal size of ZSL queue. So we'll need
+ process the remaining frames as they become available.
+ In such case, we'll get this event */
+ if(NULL != mStreamSnap)
+ mStreamSnap->takePictureZSL();
+ }
+ break;
+ default:
+ break;
+ }
+ ALOGV("processSnapshotChannelEvent: X");
+ return;
+}
+
+// Dispatches a channel event to the per-channel handler (preview, video,
+// snapshot) under mLock; unknown channels are ignored.
+void QCameraHardwareInterface::processChannelEvent(
+ mm_camera_ch_event_t *event, app_notify_cb_t *app_cb)
+{
+ ALOGV("processChannelEvent: E");
+ Mutex::Autolock lock(mLock);
+ switch(event->ch) {
+ case MM_CAMERA_CH_PREVIEW:
+ processPreviewChannelEvent(event->evt, app_cb);
+ break;
+ case MM_CAMERA_CH_VIDEO:
+ processRecordChannelEvent(event->evt, app_cb);
+ break;
+ case MM_CAMERA_CH_SNAPSHOT:
+ processSnapshotChannelEvent(event->evt, app_cb);
+ break;
+ default:
+ break;
+ }
+ ALOGV("processChannelEvent: X");
+ return;
+}
+
+// Dispatches mm-camera control events (zoom, autofocus, wavelet denoise,
+// HDR, errors, snapshot-config-done) under mLock. For ERROR and
+// SNAPSHOT_CONFIG_DONE the notification is staged into 'app_cb' and
+// delivered later by the caller (processEvent).
+void QCameraHardwareInterface::processCtrlEvent(mm_camera_ctrl_event_t *event, app_notify_cb_t *app_cb)
+{
+ ALOGV("processCtrlEvent: %d, E",event->evt);
+ Mutex::Autolock lock(mLock);
+ switch(event->evt)
+ {
+ case MM_CAMERA_CTRL_EVT_ZOOM_DONE:
+ zoomEvent(&event->status, app_cb);
+ break;
+ case MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE:
+ autoFocusEvent(&event->status, app_cb);
+ break;
+ case MM_CAMERA_CTRL_EVT_AUTO_FOCUS_MOVE:
+ autoFocusMoveEvent(&event->status, app_cb);
+ break;
+ case MM_CAMERA_CTRL_EVT_PREP_SNAPSHOT:
+ break;
+ case MM_CAMERA_CTRL_EVT_WDN_DONE:
+ wdenoiseEvent(event->status, (void *)(event->cookie));
+ break;
+ case MM_CAMERA_CTRL_EVT_HDR_DONE:
+ hdrEvent(event->status, (void *)(event->cookie));
+ break;
+ case MM_CAMERA_CTRL_EVT_ERROR:
+ // Stage a CAMERA_MSG_ERROR notification for the framework.
+ app_cb->notifyCb = mNotifyCb;
+ app_cb->argm_notify.msg_type = CAMERA_MSG_ERROR;
+ app_cb->argm_notify.ext1 = CAMERA_ERROR_UNKNOWN;
+ app_cb->argm_notify.cookie = mCallbackCookie;
+ break;
+ case MM_CAMERA_CTRL_EVT_SNAPSHOT_CONFIG_DONE:
+ ALOGV("%s: MM_CAMERA_CTRL_EVT_SNAPSHOT_CONFIG_DONE", __func__);
+ // Stage the shutter notification; ext2=true suppresses a duplicate
+ // shutter sound from the framework.
+ app_cb->notifyCb = mNotifyCb;
+ app_cb->argm_notify.msg_type = CAMERA_MSG_SHUTTER;
+ app_cb->argm_notify.ext1 = 0;
+ app_cb->argm_notify.ext2 = true;
+ app_cb->argm_notify.cookie = mCallbackCookie;
+ mShutterSoundPlayed = true;
+ // Fix: this case previously fell through into 'default'. That was
+ // harmless (default only breaks) but fragile if default ever gains
+ // logic; terminate the case explicitly.
+ break;
+ default:
+ break;
+ }
+ ALOGV("processCtrlEvent: X");
+ return;
+}
+
+// Handles stats events from the ISP. Only the histogram event is
+// recognized, and its payload handling is currently compiled out (#if 0),
+// so this effectively just validates that preview is running.
+void QCameraHardwareInterface::processStatsEvent(
+ mm_camera_stats_event_t *event, app_notify_cb_t *app_cb)
+{
+ ALOGV("processStatsEvent: E");
+ if (!isPreviewRunning( )) {
+ ALOGE("preview is not running");
+ return;
+ }
+
+ switch (event->event_id) {
+
+ case MM_CAMERA_STATS_EVT_HISTO:
+ {
+ #if 0
+ ALOGE("HAL process Histo: mMsgEnabled=0x%x, mStatsOn=%d, mSendData=%d, mDataCb=%p ",
+ (mMsgEnabled & CAMERA_MSG_STATS_DATA), mStatsOn, mSendData, mDataCb);
+ int msgEnabled = mMsgEnabled;
+ /*get stats buffer based on index*/
+ camera_preview_histogram_info* hist_info =
+ (camera_preview_histogram_info*) mHistServer.camera_memory[event->e.stats_histo.index]->data;
+
+ if(mStatsOn == QCAMERA_PARM_ENABLE && mSendData &&
+ mDataCb && (msgEnabled & CAMERA_MSG_STATS_DATA) ) {
+ uint32_t *dest;
+ mSendData = false;
+ mCurrentHisto = (mCurrentHisto + 1) % 3;
+ // The first element of the array will contain the maximum hist value provided by driver.
+ *(uint32_t *)((unsigned int)(mStatsMapped[mCurrentHisto]->data)) = hist_info->max_value;
+ memcpy((uint32_t *)((unsigned int)mStatsMapped[mCurrentHisto]->data + sizeof(int32_t)),
+ (uint32_t *)hist_info->buffer,(sizeof(int32_t) * 256));
+
+ app_cb->dataCb = mDataCb;
+ app_cb->argm_data_cb.msg_type = CAMERA_MSG_STATS_DATA;
+ app_cb->argm_data_cb.data = mStatsMapped[mCurrentHisto];
+ app_cb->argm_data_cb.index = 0;
+ app_cb->argm_data_cb.metadata = NULL;
+ app_cb->argm_data_cb.cookie = mCallbackCookie;
+ }
+ #endif
+ break;
+
+ }
+ default:
+ break;
+ }
+ ALOGV("receiveCameraStats X");
+}
+
+// Handles informational events; currently only face-detection ROI events
+// are forwarded (to roiEvent). Other event ids are ignored.
+void QCameraHardwareInterface::processInfoEvent(
+ mm_camera_info_event_t *event, app_notify_cb_t *app_cb) {
+ ALOGV("processInfoEvent: %d, E",event->event_id);
+ //Mutex::Autolock lock(eventLock);
+ switch(event->event_id)
+ {
+ case MM_CAMERA_INFO_EVT_ROI:
+ roiEvent(event->e.roi, app_cb);
+ break;
+ default:
+ break;
+ }
+ ALOGV("processInfoEvent: X");
+ return;
+}
+
+// Top-level mm-camera event pump: routes the event to the channel/ctrl/
+// stats/info handler, which may stage a notify or data callback in
+// 'app_cb'; any staged callback is then invoked here, outside the
+// handlers' locks. Events are dropped entirely while preview is stopped.
+void QCameraHardwareInterface::processEvent(mm_camera_event_t *event)
+{
+ app_notify_cb_t app_cb;
+ ALOGV("processEvent: type :%d E",event->event_type);
+ if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED){
+ ALOGV("Stop recording issued. Return from process Event");
+ return;
+ }
+ memset(&app_cb, 0, sizeof(app_notify_cb_t));
+ switch(event->event_type)
+ {
+ case MM_CAMERA_EVT_TYPE_CH:
+ processChannelEvent(&event->e.ch, &app_cb);
+ break;
+ case MM_CAMERA_EVT_TYPE_CTRL:
+ processCtrlEvent(&event->e.ctrl, &app_cb);
+ break;
+ case MM_CAMERA_EVT_TYPE_STATS:
+ processStatsEvent(&event->e.stats, &app_cb);
+ break;
+ case MM_CAMERA_EVT_TYPE_INFO:
+ processInfoEvent(&event->e.info, &app_cb);
+ break;
+ default:
+ break;
+ }
+ ALOGV(" App_cb Notify %p, datacb=%p", app_cb.notifyCb, app_cb.dataCb);
+ // Deliver staged callbacks after all handlers have released mLock.
+ if (app_cb.notifyCb) {
+ app_cb.notifyCb(app_cb.argm_notify.msg_type,
+ app_cb.argm_notify.ext1, app_cb.argm_notify.ext2,
+ app_cb.argm_notify.cookie);
+ }
+ if (app_cb.dataCb) {
+ app_cb.dataCb(app_cb.argm_data_cb.msg_type,
+ app_cb.argm_data_cb.data, app_cb.argm_data_cb.index,
+ app_cb.argm_data_cb.metadata, app_cb.argm_data_cb.cookie);
+ }
+ ALOGV("processEvent: X");
+ return;
+}
+
+// Reconciles the requested preview size/format from 'parm' into 'dim'.
+// Returns true when the existing dimensions already match (no stream
+// re-init needed), false when 'dim' was updated and the preview stream
+// must be reconfigured. Also resets video/snapshot formats that a video-
+// size liveshot may have altered.
+bool QCameraHardwareInterface::preview_parm_config (cam_ctrl_dimension_t* dim,
+ QCameraParameters& parm)
+{
+ ALOGV("preview_parm_config: E");
+ bool matching = true;
+ int display_width = 0; /* width of display */
+ int display_height = 0; /* height of display */
+ uint16_t video_width = 0; /* width of the video */
+ uint16_t video_height = 0; /* height of the video */
+
+ /* First check if the preview resolution is the same, if not, change it*/
+ parm.getPreviewSize(&display_width, &display_height);
+ if (display_width && display_height) {
+ matching = (display_width == dim->display_width) &&
+ (display_height == dim->display_height);
+
+ if (!matching) {
+ dim->display_width = display_width;
+ dim->display_height = display_height;
+ }
+ }
+ else
+ matching = false;
+
+ cam_format_t value = getPreviewFormat();
+
+ if(value != NOT_FOUND && value != dim->prev_format ) {
+ //Setting to Parameter requested by the Upper layer
+ dim->prev_format = value;
+ } else if (value == NOT_FOUND) {
+ //Setting to default Format.
+ dim->prev_format = CAMERA_YUV_420_NV21;
+ }
+ mPreviewFormat = dim->prev_format;
+
+ dim->prev_padding_format = getPreviewPadding( );
+
+ dim->enc_format = CAMERA_YUV_420_NV12;
+ dim->orig_video_width = mDimension.orig_video_width;
+ dim->orig_video_height = mDimension.orig_video_height;
+ dim->video_width = mDimension.video_width;
+ dim->video_height = mDimension.video_height;
+ dim->video_chroma_width = mDimension.video_width;
+ dim->video_chroma_height = mDimension.video_height;
+ /* Reset the Main image and thumbnail formats here,
+ * since they might have been changed when video size
+ * livesnapshot was taken. */
+ if (mSnapshotFormat == 1)
+ dim->main_img_format = CAMERA_YUV_422_NV61;
+ else
+ dim->main_img_format = CAMERA_YUV_420_NV21;
+ dim->thumb_format = CAMERA_YUV_420_NV21;
+ ALOGV("preview_parm_config: X");
+ return matching;
+}
+
+// Framework startPreview entry point. If frame dispatch was paused after a
+// ZSL snapshot, the preview is first fully stopped and restarted. Actual
+// stream start is deferred until a preview window exists (or no-display
+// mode is active); otherwise the state stays at PREVIEW_START and the
+// window-set callback will complete the start.
+status_t QCameraHardwareInterface::startPreview()
+{
+ status_t retVal = NO_ERROR;
+
+ ALOGV("%s: mPreviewState =%d", __func__, mPreviewState);
+ Mutex::Autolock lock(mLock);
+
+ if(mPauseFramedispatch){
+ //In ZSL case after snapshot we need to restart
+ //if stop preview not issued.
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ mPauseFramedispatch = false;
+ }
+
+ switch(mPreviewState) {
+ // TAKE_PICTURE intentionally shares the STOPPED path: restarting
+ // preview after a capture goes through the same start sequence.
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_TAKE_PICTURE:
+ mPreviewState = QCAMERA_HAL_PREVIEW_START;
+ ALOGV("%s: HAL::startPreview begin", __func__);
+
+ if(QCAMERA_HAL_PREVIEW_START == mPreviewState &&
+ (mPreviewWindow || isNoDisplayMode())) {
+ ALOGD("%s: start preview now", __func__);
+ retVal = startPreview2();
+ if(retVal == NO_ERROR)
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ } else {
+ ALOGV("%s: received startPreview, but preview window = null", __func__);
+ }
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ ALOGV("%s: cannot start preview in recording state", __func__);
+ break;
+ default:
+ ALOGE("%s: unknow state %d received", __func__, mPreviewState);
+ retVal = UNKNOWN_ERROR;
+ break;
+ }
+ return retVal;
+}
+
+// Second-stage preview start: chooses ZSL vs non-ZSL mode (recording,
+// flash or HDR scene force non-ZSL), queries and reconfigures stream
+// dimensions, propagates the mode to all stream objects, attaches the
+// preview window, and starts the preview (and, in ZSL, snapshot) streams.
+// Returns NO_ERROR on success or BAD_VALUE on any mm-camera failure.
+status_t QCameraHardwareInterface::startPreview2()
+{
+ ALOGV("startPreview2: E");
+ status_t ret = NO_ERROR;
+
+ cam_ctrl_dimension_t dim;
+ mm_camera_dimension_t maxDim;
+ bool initPreview = false;
+
+ mPauseFramedispatch = false;
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED) { //isPreviewRunning()){
+ ALOGE("%s:Preview already started mCameraState = %d!", __func__, mCameraState);
+ ALOGV("%s: X", __func__);
+ return NO_ERROR;
+ }
+
+ const char *str = mParameters.get(QCameraParameters::KEY_SCENE_MODE);
+
+ // Fix: guard against a NULL scene-mode string — get() returns NULL when
+ // the key was never set, and the original passed it straight to
+ // strcmp(), which is undefined behavior (crash on this path).
+ if (mRecordingHint || mFlashCond || (str && !strcmp(str, "hdr"))) {
+ ALOGI("%s:Setting non-ZSL mode",__func__);
+ mParameters.set(QCameraParameters::KEY_CAMERA_MODE, 0);
+ myMode = (camera_mode_t)(myMode & ~CAMERA_ZSL_MODE);
+ mParameters.setPreviewFrameRateMode("frame-rate-auto");
+ setPreviewFrameRateMode(mParameters);
+ } else {
+ ALOGI("%s:Setting ZSL mode",__func__);
+ mParameters.set(QCameraParameters::KEY_CAMERA_MODE, 1);
+ myMode = (camera_mode_t)(myMode | CAMERA_ZSL_MODE);
+ mParameters.setPreviewFrameRateMode("frame-rate-auto");
+ setPreviewFrameRateMode(mParameters);
+
+ if (mHasAutoFocusSupport) {
+ // Enable continuous autofocus (CAF) in ZSL mode.
+ int cafSupport = true;
+ int caf_type = 2;
+ native_set_parms(MM_CAMERA_PARM_CAF_TYPE, sizeof(caf_type), (void *)&caf_type);
+ native_set_parms(MM_CAMERA_PARM_CONTINUOUS_AF, sizeof(cafSupport),
+ (void *)&cafSupport);
+ }
+ }
+
+ // Same NULL guard as above; a missing scene mode is treated as
+ // "not auto", matching the non-NULL behavior for any other value.
+ if (mHasAutoFocusSupport && (!str || strcmp(str, "auto"))) {
+ int cafSupport = true;
+ int caf_type = 2;
+ native_set_parms(MM_CAMERA_PARM_CAF_TYPE, sizeof(caf_type), (void *)&caf_type);
+ native_set_parms(MM_CAMERA_PARM_CONTINUOUS_AF, sizeof(cafSupport),
+ (void *)&cafSupport);
+ }
+ /* get existing preview information, by qury mm_camera*/
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: error - can't get preview dimension!", __func__);
+ ALOGV("%s: X", __func__);
+ return BAD_VALUE;
+ }
+
+ /* config the parmeters and see if we need to re-init the stream*/
+ initPreview = preview_parm_config (&dim, mParameters);
+
+ if (mRecordingHint && mFullLiveshotEnabled) {
+#if 0
+ /* Camcorder mode and Full resolution liveshot enabled
+ * TBD lookup table for correct aspect ratio matching size */
+ memset(&maxDim, 0, sizeof(mm_camera_dimension_t));
+ getMaxPictureDimension(&maxDim);
+ if (!maxDim.width || !maxDim.height) {
+ maxDim.width = DEFAULT_LIVESHOT_WIDTH;
+ maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+ }
+ /* TODO Remove this hack after adding code to get live shot dimension */
+ if (!mCameraId) {
+ maxDim.width = DEFAULT_LIVESHOT_WIDTH;
+ maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+ }
+ dim.picture_width = maxDim.width;
+ dim.picture_height = maxDim.height;
+ mParameters.setPictureSize(dim.picture_width, dim.picture_height);
+ ALOGI("%s Setting Liveshot dimension as %d x %d", __func__,
+ maxDim.width, maxDim.height);
+#endif
+ int mPictureWidth, mPictureHeight;
+ bool matching;
+ /* First check if the picture resolution is the same, if not, change it*/
+ getPictureSize(&mPictureWidth, &mPictureHeight);
+
+ matching = (mPictureWidth == dim.picture_width) &&
+ (mPictureHeight == dim.picture_height);
+
+ if (!matching) {
+ dim.picture_width = mPictureWidth;
+ dim.picture_height = mPictureHeight;
+ //For FullSize snapshot Main image Buffer is used as Thumanail.
+ // NOTE(review): width is assigned to thumbnail *height* and vice
+ // versa below — looks swapped; left as-is pending confirmation
+ // against the mm-camera dimension contract.
+ dim.ui_thumbnail_height = mPictureWidth;
+ dim.ui_thumbnail_width = mPictureHeight;
+ }
+ ALOGI("%s: Fullsize Liveshaot Picture size to set: %d x %d", __func__,
+ dim.picture_width, dim.picture_height);
+ mParameters.setPictureSize(dim.picture_width, dim.picture_height);
+ }
+
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s X: error - can't config preview parms!", __func__);
+ return BAD_VALUE;
+ }
+
+ // Propagate the final ZSL/non-ZSL decision to all stream objects.
+ mStreamDisplay->setMode(myMode & CAMERA_ZSL_MODE);
+ mStreamSnap->setMode(myMode & CAMERA_ZSL_MODE);
+ mStreamRecord->setMode(myMode & CAMERA_ZSL_MODE);
+ ALOGV("%s: myMode = %d", __func__, myMode);
+
+ ALOGV("%s: setPreviewWindow", __func__);
+ mStreamDisplay->setPreviewWindow(mPreviewWindow);
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_INFORM_STARTPRVIEW, NULL);
+ if(ret<0)
+ {
+ // Non-fatal: informing the daemon of preview start is best-effort.
+ ALOGE("%s: Failed to Check MM_CAMERA_PARM_INFORM_STARTPRVIEW, rc %d", __func__, ret);
+ }
+
+ if(isZSLMode()) {
+ /* Start preview streaming */
+ ret = mStreamDisplay->start();
+ if (MM_CAMERA_OK != ret){
+ ALOGE("%s: X -error - can't start nonZSL stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ /* Start ZSL stream */
+ ret = mStreamSnap->start();
+ if (MM_CAMERA_OK != ret){
+ ALOGE("%s: error - can't start Snapshot stream!", __func__);
+ // Roll back the already-started preview stream.
+ mStreamDisplay->stop();
+ return BAD_VALUE;
+ }
+ }else{
+ ret = mStreamDisplay->start();
+ }
+
+ /*call QCameraStream_noneZSL::start() */
+ if (MM_CAMERA_OK != ret){
+ ALOGE("%s: X error - can't start stream!", __func__);
+ return BAD_VALUE;
+ }
+ if(MM_CAMERA_OK == ret)
+ mCameraState = CAMERA_STATE_PREVIEW_START_CMD_SENT;
+ else
+ mCameraState = CAMERA_STATE_ERROR;
+
+ if(mPostPreviewHeap != NULL) {
+ mPostPreviewHeap.clear();
+ mPostPreviewHeap = NULL;
+ }
+
+ mRestartPreview = false;
+
+ ALOGV("startPreview: X");
+ return ret;
+}
+
+// Framework stopPreview entry point: restores the app-requested recording
+// hint, turns face detection off if it was left on, then stops the active
+// streams according to the current preview state.
+void QCameraHardwareInterface::stopPreview()
+{
+ ALOGV("%s: stopPreview: E", __func__);
+ Mutex::Autolock lock(mLock);
+ mm_camera_util_profile("HAL: stopPreview(): E");
+ mFaceDetectOn = false;
+
+ // reset recording hint to the value passed from Apps
+ const char * str = mParameters.get(QCameraParameters::KEY_RECORDING_HINT);
+ if((str != NULL) && !strcmp(str, "true")){
+ mRecordingHint = true;
+ } else {
+ mRecordingHint = false;
+ }
+
+ // If the app left face detection enabled, switch it off before stopping.
+ const char * str_fd = mParameters.get(QCameraParameters::KEY_FACE_DETECTION);
+ if((str_fd != NULL) && !strcmp(str_fd, "on")){
+ if(supportsFaceDetection() == false){
+ ALOGE("Face detection support is not available");
+ }
+ setFaceDetection("off");
+ runFaceDetection();
+ }
+
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_START:
+ // Start was requested but streams never ran; just reset the state.
+ //mPreviewWindow = NULL;
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ stopRecordingInternal();
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ default:
+ break;
+ }
+ ALOGV("stopPreview: X, mPreviewState = %d", mPreviewState);
+}
+
+// Disabled ZSL-specific stop path (author tag "mzhu"); superseded by
+// stopPreviewInternal(), which handles the ZSL snapshot stream itself.
+#if 0 //mzhu
+void QCameraHardwareInterface::stopPreviewZSL()
+{
+ ALOGI("stopPreviewZSL: E");
+
+ if(!mStreamDisplay || !mStreamSnap) {
+ ALOGE("mStreamDisplay/mStreamSnap is null");
+ return;
+ }
+ ALOGI("stopPreview: X, mPreviewState = %d", mPreviewState);
+}
+#endif
+// Stops the preview stream (and, in ZSL mode, the snapshot stream first)
+// and marks the camera state as stop-command-sent. No-op with an error
+// log if the preview stream object does not exist.
+void QCameraHardwareInterface::stopPreviewInternal()
+{
+ ALOGV("stopPreviewInternal: E");
+ status_t ret = NO_ERROR;
+
+ if(!mStreamDisplay) {
+ ALOGE("mStreamDisplay is null");
+ return;
+ }
+
+ if(isZSLMode()) {
+ /* take care snapshot object for ZSL mode */
+ mStreamSnap->stop();
+ }
+ mStreamDisplay->stop();
+
+ mPauseFramedispatch = false;
+ mCameraState = CAMERA_STATE_PREVIEW_STOP_CMD_SENT;
+ ALOGV("stopPreviewInternal: X");
+}
+
+// Reports to the framework whether preview is (or is about to be) running.
+// Returns false while frame dispatch is paused after a ZSL snapshot, even
+// though the state machine still says preview is active.
+int QCameraHardwareInterface::previewEnabled()
+{
+ ALOGV("previewEnabled: E");
+ Mutex::Autolock lock(mLock);
+ ALOGV("%s: mCameraState = %d", __func__, mCameraState);
+ if (mPauseFramedispatch) {
+ return false;
+ }
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_TAKE_PICTURE:
+ default:
+ return false;
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ case QCAMERA_HAL_RECORDING_STARTED:
+ return true;
+ break;
+ }
+ return false;
+}
+
+// Framework startRecording entry point. Only legal from PREVIEW_STARTED:
+// saves the app's recording hint, pauses/reconfigures preview for video,
+// and starts the record stream. Already-recording is a silent no-op.
+status_t QCameraHardwareInterface::startRecording()
+{
+ ALOGV("startRecording: E");
+ status_t ret = NO_ERROR;
+ Mutex::Autolock lock(mLock);
+
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ ALOGE("%s: preview has not been started", __func__);
+ ret = UNKNOWN_ERROR;
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ ALOGE("%s: no preview native window", __func__);
+ ret = UNKNOWN_ERROR;
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ //remember recordinghint value set by app
+ mAppRecordingHint = mRecordingHint;
+ pausePreviewForVideo();
+ ret = mStreamRecord->start();
+ if (MM_CAMERA_OK != ret){
+ ALOGE("%s: error - mStreamRecord->start!", __func__);
+ ret = BAD_VALUE;
+ break;
+ }
+ // NOTE(review): the 'else' below is unreachable — the failure path
+ // has already broken out of the case above, so ret is always
+ // MM_CAMERA_OK here.
+ if(MM_CAMERA_OK == ret)
+ mCameraState = CAMERA_STATE_RECORD_START_CMD_SENT;
+ else
+ mCameraState = CAMERA_STATE_ERROR;
+ mPreviewState = QCAMERA_HAL_RECORDING_STARTED;
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ ALOGV("%s: ", __func__);
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ default:
+ ret = BAD_VALUE;
+ break;
+ }
+ ALOGV("startRecording: X");
+ return ret;
+}
+
+// Framework stopRecording entry point: restores the app's recording hint,
+// stops the record stream, and drops back to PREVIEW_STARTED. A no-op in
+// every state other than RECORDING_STARTED.
+void QCameraHardwareInterface::stopRecording()
+{
+ ALOGV("stopRecording: E");
+ Mutex::Autolock lock(mLock);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ mRecordingHint = mAppRecordingHint;
+ stopRecordingInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ default:
+ break;
+ }
+ ALOGV("stopRecording: X");
+
+}
+// Stops the record stream, first aborting any in-progress liveshot, and
+// returns the state machine to preview. No-op with an error log if the
+// record stream object does not exist.
+void QCameraHardwareInterface::stopRecordingInternal()
+{
+ ALOGV("stopRecordingInternal: E");
+ status_t ret = NO_ERROR;
+
+ if(!mStreamRecord) {
+ ALOGE("mStreamRecord is null");
+ return;
+ }
+
+ // Abort a pending liveshot before stopping the video stream.
+ if(mStateLiveshot && mStreamLiveSnap != NULL) {
+ mStreamLiveSnap->stop();
+ mStateLiveshot = false;
+ }
+ /*
+ * call QCameraStream_record::stop()
+ * Unregister Callback, action stop
+ */
+ mStreamRecord->stop();
+ mCameraState = CAMERA_STATE_PREVIEW; //TODO : Apurva : Hacked for 2nd time Recording
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ ALOGV("stopRecordingInternal: X");
+ return;
+}
+
+// Reports to the framework whether video recording is active (1) or not
+// (0), based purely on the HAL preview state machine.
+int QCameraHardwareInterface::recordingEnabled()
+{
+ int ret = 0;
+ Mutex::Autolock lock(mLock);
+ ALOGV("%s: E", __func__);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ ret = 1;
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ default:
+ break;
+ }
+ ALOGV("%s: X, ret = %d", __func__, ret);
+ return ret; //isRecordingRunning();
+}
+
+/**
+* Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+*/
+// Forwards the opaque frame handle back to the record stream so its
+// buffer can be re-queued; no-op with an error log if the record stream
+// was never created.
+void QCameraHardwareInterface::releaseRecordingFrame(const void *opaque)
+{
+ ALOGV("%s : BEGIN",__func__);
+ if(mStreamRecord == NULL) {
+ ALOGE("Record stream Not Initialized");
+ return;
+ }
+ mStreamRecord->releaseRecordingFrame(opaque);
+ ALOGV("%s : END",__func__);
+ return;
+}
+
+// Stages a CAMERA_MSG_FOCUS_MOVE notification when continuous autofocus
+// (CAF) reports lens movement; *status 1 = moving started, 0 = stopped.
+// Only fires when CAF is active and the framework enabled the message.
+status_t QCameraHardwareInterface::autoFocusMoveEvent(cam_ctrl_status_t *status, app_notify_cb_t *app_cb)
+{
+ ALOGV("autoFocusMoveEvent: E");
+ int ret = NO_ERROR;
+
+ isp3a_af_mode_t afMode = getAutoFocusMode(mParameters);
+
+ if (afMode == AF_MODE_CAF && mNotifyCb && ( mMsgEnabled & CAMERA_MSG_FOCUS_MOVE)){
+ ALOGV("%s:Issuing AF Move callback to service",__func__);
+
+ app_cb->notifyCb = mNotifyCb;
+ app_cb->argm_notify.msg_type = CAMERA_MSG_FOCUS_MOVE;
+ app_cb->argm_notify.ext2 = 0;
+ app_cb->argm_notify.cookie = mCallbackCookie;
+
+ ALOGV("Auto focus state =%d", *status);
+ if(*status == 1) {
+ app_cb->argm_notify.ext1 = true;
+ }else if(*status == 0){
+ app_cb->argm_notify.ext1 = false;
+ }else{
+ // Unknown status: cancel the staged callback entirely.
+ app_cb->notifyCb = NULL;
+ ALOGE("%s:Unknown AF Move Status received (%d) received",__func__,*status);
+ }
+ }
+ ALOGV("autoFocusMoveEvent: X");
+ return ret;
+}
+
+// Handles an autofocus-done event: under mAutofocusLock it discards stale
+// events, marks AF finished (and flags CAF for a later unlock), then
+// updates focus distances and stages the CAMERA_MSG_FOCUS callback with
+// ext1 = success/failure. The callback is staged, not invoked, so it runs
+// after all locks are released (see processEvent).
+status_t QCameraHardwareInterface::autoFocusEvent(cam_ctrl_status_t *status, app_notify_cb_t *app_cb)
+{
+ ALOGV("autoFocusEvent: E");
+ int ret = NO_ERROR;
+/************************************************************
+ BEGIN MUTEX CODE
+*************************************************************/
+
+ ALOGV("%s:%d: Trying to acquire AF bit lock",__func__,__LINE__);
+ mAutofocusLock.lock();
+ ALOGV("%s:%d: Acquired AF bit lock",__func__,__LINE__);
+
+ // A stale event (AF already cancelled/completed) is silently dropped.
+ if(mAutoFocusRunning==false) {
+ ALOGV("%s:AF not running, discarding stale event",__func__);
+ mAutofocusLock.unlock();
+ return ret;
+ }
+ /* If autofocus call has been made during CAF, CAF will be locked.
+ * We specifically need to call cancelAutoFocus to unlock CAF.
+ * In that sense, AF is still running.*/
+ isp3a_af_mode_t afMode = getAutoFocusMode(mParameters);
+ if (afMode == AF_MODE_CAF)
+ mNeedToUnlockCaf = true;
+ mAutoFocusRunning = false;
+ mAutofocusLock.unlock();
+
+/************************************************************
+ END MUTEX CODE
+*************************************************************/
+ if(status==NULL) {
+ ALOGE("%s:NULL ptr received for status",__func__);
+ return BAD_VALUE;
+ }
+
+ /* update focus distances after autofocus is done */
+ if((*status != CAM_CTRL_FAILED) && updateFocusDistances() != NO_ERROR) {
+ ALOGE("%s: updateFocusDistances failed for %d", __FUNCTION__, mFocusMode);
+ }
+
+ /*(Do?) we need to make sure that the call back is the
+ last possible step in the execution flow since the same
+ context might be used if a fail triggers another round
+ of AF then the mAutoFocusRunning flag and other state
+ variables' validity will be under question*/
+
+ if (mNotifyCb && ( mMsgEnabled & CAMERA_MSG_FOCUS)){
+ ALOGV("%s:Issuing AF callback to service",__func__);
+
+ /* "Accepted" status is not appropriate it should be used for
+ initial cmd, event reporting should only give use SUCCESS/FAIL
+ */
+
+ app_cb->notifyCb = mNotifyCb;
+ app_cb->argm_notify.msg_type = CAMERA_MSG_FOCUS;
+ app_cb->argm_notify.ext2 = 0;
+ app_cb->argm_notify.cookie = mCallbackCookie;
+
+ ALOGV("Auto focus state =%d", *status);
+ if(*status==CAM_CTRL_SUCCESS) {
+ app_cb->argm_notify.ext1 = true;
+ }
+ else if(*status==CAM_CTRL_FAILED){
+ app_cb->argm_notify.ext1 = false;
+ }
+ else{
+ // Neither success nor failure: cancel the staged callback.
+ app_cb->notifyCb = NULL;
+ ALOGE("%s:Unknown AF status (%d) received",__func__,*status);
+ }
+
+ }/*(mNotifyCb && ( mMsgEnabled & CAMERA_MSG_FOCUS))*/
+ else{
+ ALOGE("%s:Call back not enabled",__func__);
+ }
+
+ ALOGV("autoFocusEvent: X");
+ return ret;
+
+}
+
+/* Cancel an in-progress still capture. Behavior depends on preview state:
+ * - preview stopped/starting/started: nothing to cancel
+ * - recording: stop a pending liveshot stream, if any
+ * - take-picture: delegate to cancelPictureInternal()
+ * Returns MM_CAMERA_OK unless cancelPictureInternal reports otherwise. */
+status_t QCameraHardwareInterface::cancelPicture()
+{
+ ALOGV("cancelPicture: E");
+ status_t ret = MM_CAMERA_OK;
+ Mutex::Autolock lock(mLock);
+
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ if(mStateLiveshot && (mStreamLiveSnap != NULL)) {
+ mStreamLiveSnap->stop();
+ mStateLiveshot = false;
+ }
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ ret = cancelPictureInternal();
+ break;
+ default:
+ break;
+ }
+ ALOGV("cancelPicture: X");
+ return ret;
+}
+
+/* Stop the snapshot stream if the snapshot has not yet completed
+ * (camera not back in READY state). Always returns MM_CAMERA_OK. */
+status_t QCameraHardwareInterface::cancelPictureInternal()
+{
+ ALOGV("cancelPictureInternal: E");
+ status_t ret = MM_CAMERA_OK;
+ if(mCameraState != CAMERA_STATE_READY) {
+ if(mStreamSnap) {
+ mStreamSnap->stop();
+ mCameraState = CAMERA_STATE_SNAP_STOP_CMD_SENT;
+ }
+ } else {
+ ALOGE("%s: Cannot process cancel picture as snapshot is already done",__func__);
+ }
+ ALOGV("cancelPictureInternal: X");
+ return ret;
+}
+
+/* Stop the preview stream so the snapshot stream can take over the VFE. */
+void QCameraHardwareInterface::pausePreviewForSnapshot()
+{
+ stopPreviewInternal( );
+}
+/* Restart the display (preview) stream that was paused for a snapshot.
+ * Returns the stream's start() status. */
+status_t QCameraHardwareInterface::resumePreviewAfterSnapshot()
+{
+ return mStreamDisplay->start();
+}
+
+/* Frame callback registered for video-size liveshot: tears down any stale
+ * liveshot stream, creates a fresh QCameraStream_Snapshot instance, and
+ * hands the received frame to takePictureLiveshot(). user_data is the
+ * owning QCameraHardwareInterface. */
+void liveshot_callback(mm_camera_ch_data_buf_t *recvd_frame,
+ void *user_data)
+{
+ QCameraHardwareInterface *pme = (QCameraHardwareInterface *)user_data;
+ cam_ctrl_dimension_t dim;
+ int mJpegMaxSize;
+ int mNuberOfVFEOutputs = 0;
+ status_t ret;
+ ALOGV("%s: E", __func__);
+
+ /* Picture was cancelled between frame arrival and this callback. */
+ if (!pme->mStateLiveshot) {
+ ALOGE("%s: Returning Buffer. Picture Cancelled", __func__);
+ return;
+ }
+
+ ALOGV("Live Snapshot Enabled");
+ if (pme->mStreamLiveSnap){
+ ALOGE("%s:Deleting old Snapshot stream instance",__func__);
+ QCameraStream_Snapshot::deleteInstance (pme->mStreamLiveSnap);
+ pme->mStreamLiveSnap = NULL;
+ }
+
+ pme->mStreamLiveSnap = (QCameraStream_Snapshot*)QCameraStream_Snapshot::createInstance(pme->mCameraId,
+ pme->myMode);
+
+ if (!pme->mStreamLiveSnap) {
+ ALOGE("%s: error - can't creat snapshot stream!", __func__);
+ /* Release the frame back to the channel since we cannot consume it. */
+ cam_evt_buf_done(pme->mCameraId, recvd_frame);
+ return ;
+ }
+ pme->mStreamLiveSnap->setModeLiveSnapshot(true);
+ pme->mStreamLiveSnap->setHALCameraControl(pme);
+
+ /* NOTE(review): on takePictureLiveshot failure the frame is NOT returned
+ * via cam_evt_buf_done here, unlike the createInstance failure path —
+ * confirm whether takePictureLiveshot releases it internally. */
+ ret = ((QCameraStream_Snapshot*)(pme->mStreamLiveSnap))->takePictureLiveshot(recvd_frame);
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: Error : returned from takePictureLiveshot",__func__);
+ return;
+ }
+ ALOGV("%s: X", __func__);
+
+}
+
+/* Entry point for still capture. Depending on mPreviewState:
+ * - preview started: ZSL capture (takePictureZSL) or regular snapshot
+ *   (prepare hardware, pause preview, start snapshot stream)
+ * - recording: full-size liveshot if possible, otherwise register a
+ *   one-shot frame callback (liveshot_callback) on the preview or video
+ *   channel depending on the number of VFE outputs
+ * HDR exposure bracketing info is fetched and pushed to the snapshot
+ * stream before the capture starts. Caller-visible result is a status_t. */
+status_t QCameraHardwareInterface::takePicture()
+{
+ ALOGV("takePicture: E");
+ status_t ret = MM_CAMERA_OK;
+ int mNuberOfVFEOutputs = 0;
+ Mutex::Autolock lock(mLock);
+ bool hdr;
+ int frm_num = 1, rc = 0;
+ int exp[MAX_HDR_EXP_FRAME_NUM];
+
+ /* Outside recording: if flash will fire (and not CAF), restart preview
+ * first so the ZSL queue does not contain pre-flash frames. */
+ if(QCAMERA_HAL_RECORDING_STARTED != mPreviewState){
+ isp3a_af_mode_t afMode = getAutoFocusMode(mParameters);
+ if (afMode != AF_MODE_CAF && !mFlashCond)
+ {
+ mFlashCond = getFlashCondition();
+ }
+ ALOGV("%s: Flash Contidion %d", __func__, mFlashCond);
+ if(mFlashCond) {
+ mRestartPreview = true;
+ pausePreviewForZSL();
+ }
+ mFlashCond = false;
+ }
+
+ rc = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_LG_CAF_LOCK, NULL);
+ if(rc<0)
+ {
+ ALOGE("%s: Failed to Check MM_CAMERA_PARM_LG_CAF_LOCK, rc %d", __func__, rc);
+ }
+ hdr = getHdrInfoAndSetExp(MAX_HDR_EXP_FRAME_NUM, &frm_num, exp);
+ mStreamSnap->resetSnapshotCounters();
+ mStreamSnap->InitHdrInfoForSnapshot(hdr, frm_num, exp);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ //set the fullsize liveshot to false
+ mFullLiveshotEnabled = false;
+ setFullLiveshot();
+ mStreamSnap->setFullSizeLiveshot(false);
+ if (isZSLMode()) {
+ if (mStreamSnap != NULL) {
+ pausePreviewForZSL();
+ ret = mStreamSnap->takePictureZSL();
+ if (ret != MM_CAMERA_OK) {
+ ALOGE("%s: Error taking ZSL snapshot!", __func__);
+ ret = BAD_VALUE;
+ }
+ }
+ else {
+ ALOGE("%s: ZSL stream not active! Failure!!", __func__);
+ ret = BAD_VALUE;
+ }
+ /* ZSL path returns here; preview state is left untouched. */
+ return ret;
+ }
+ /*prepare snapshot, e.g LED*/
+ takePicturePrepareHardware( );
+ /* There's an issue where we have a glimpse of corrupted data between
+ a time we stop a preview and display the postview. It happens because
+ when we call stopPreview we deallocate the preview buffers hence overlay
+ displays garbage value till we enqueue postview buffer to be displayed.
+ Hence for temporary fix, we'll do memcopy of the last frame displayed and
+ queue it to overlay*/
+ // mzhu storePreviewFrameForPostview();
+
+ /* stop preview */
+ pausePreviewForSnapshot();
+
+ /* call Snapshot start() :*/
+ ret = mStreamSnap->start();
+ if (MM_CAMERA_OK != ret){
+ /* mzhu: fix me, restore preview */
+ ALOGE("%s: error - can't start Snapshot stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ if(MM_CAMERA_OK == ret)
+ mCameraState = CAMERA_STATE_SNAP_START_CMD_SENT;
+ else
+ mCameraState = CAMERA_STATE_ERROR;
+ mPreviewState = QCAMERA_HAL_TAKE_PICTURE;
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ break;
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_PREVIEW_START:
+ ret = UNKNOWN_ERROR;
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ if(mStateLiveshot) {
+ ALOGE("takePicture : Duplicate TakePicture Call");
+ return ret;
+ }
+ if (canTakeFullSizeLiveshot()) {
+ ALOGV(" Calling takeFullSizeLiveshot");
+ takeFullSizeLiveshot();
+ }else{
+ /* Video-size liveshot: grab one frame via a registered callback.
+ * With a single VFE output the preview channel carries the frames,
+ * otherwise the video channel does. */
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VFE_OUTPUT_ENABLE,
+ &mNuberOfVFEOutputs);
+ if (ret != MM_CAMERA_OK) {
+ ALOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE failed");
+ ret = BAD_VALUE;
+ }
+ if (mNuberOfVFEOutputs == 1){
+ (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+ liveshot_callback,
+ MM_CAMERA_REG_BUF_CB_COUNT,
+ 1,
+ this);
+ } else {
+ (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+ liveshot_callback,
+ MM_CAMERA_REG_BUF_CB_COUNT,
+ 1,
+ this);
+ }
+ }
+ mStateLiveshot = true;
+ break;
+ default:
+ ret = UNKNOWN_ERROR;
+ break;
+ }
+ ALOGV("takePicture: X");
+ return ret;
+}
+
+/* Decide whether a full-resolution liveshot can be taken during recording,
+ * or whether we must fall back to a video-size liveshot.
+ * Returns true only when full-size liveshot is enabled, we are not in
+ * low-power camcorder mode, the picture size differs from the video size,
+ * and (if DIS is on) the picture is at least video size + 10% DIS margin. */
+bool QCameraHardwareInterface::canTakeFullSizeLiveshot() {
+ /* Feature disabled or low-power camcorder: video-size liveshot only. */
+ if (!mFullLiveshotEnabled || isLowPowerCamcorder())
+ return false;
+
+ /* Picture size equals video size: full-size buys nothing, fall back. */
+ if ((mDimension.picture_width == mVideoWidth) &&
+ (mDimension.picture_height == mVideoHeight))
+ return false;
+
+ /* DIS off: full-size liveshot is fine. */
+ if (!mDisEnabled)
+ return true;
+
+ /* DIS on: require the picture to cover video size plus the 10% DIS
+ * margin in both dimensions, otherwise fall back to video size. */
+ return (mDimension.picture_width >= (int)(mVideoWidth * 1.1)) &&
+ (mDimension.picture_height >= (int)(mVideoHeight * 1.1));
+}
+
+/* Create, init and start a full-resolution liveshot snapshot stream while
+ * recording continues. Any previous liveshot stream instance is deleted
+ * first. Returns NO_ERROR on success, BAD_VALUE on any failure. */
+status_t QCameraHardwareInterface::takeFullSizeLiveshot()
+{
+ status_t ret = NO_ERROR;
+ if (mStreamLiveSnap){
+ ALOGV("%s:Deleting old Snapshot stream instance",__func__);
+ QCameraStream_Snapshot::deleteInstance (mStreamLiveSnap);
+ mStreamLiveSnap = NULL;
+ }
+ mStreamLiveSnap = QCameraStream_Snapshot::createInstance(mCameraId, myMode);
+
+ if (!mStreamLiveSnap) {
+ ALOGE("%s: error - can't creat snapshot stream!", __func__);
+ /* mzhu: fix me, restore preview */
+ return BAD_VALUE;
+ }
+
+ /* Store HAL object in snapshot stream Object */
+ mStreamLiveSnap->setHALCameraControl(this);
+
+ mStreamLiveSnap->setFullSizeLiveshot(true);
+
+ /* Call snapshot init*/
+ ret = mStreamLiveSnap->init();
+ if (MM_CAMERA_OK != ret){
+ ALOGE("%s: error - can't init Snapshot stream!", __func__);
+ return BAD_VALUE;
+ }
+
+ /* call Snapshot start() :*/
+ mStreamLiveSnap->resetSnapshotCounters( );
+ ret = mStreamLiveSnap->start();
+ if (MM_CAMERA_OK != ret){
+ /* mzhu: fix me, restore preview */
+ ALOGE("%s: error - can't start Snapshot stream!", __func__);
+ return BAD_VALUE;
+ }
+ return ret;
+}
+
+/* Delegate a live snapshot request to the record stream.
+ * Always returns NO_ERROR; the record stream's result is not propagated. */
+status_t QCameraHardwareInterface::takeLiveSnapshot()
+{
+ status_t ret = NO_ERROR;
+ ALOGV("takeLiveSnapshot: E");
+ mStreamRecord->takeLiveSnapshot();
+ ALOGV("takeLiveSnapshot: X");
+ return ret;
+}
+
+/* Start an auto-focus cycle. Ignores the request if AF is already running.
+ * An invalid AF mode is deliberately passed down (see comment below) to
+ * avoid a CameraService deadlock; the lower layer rejects it instead.
+ * Returns NO_ERROR on success/no-op, UNKNOWN_ERROR if the AF command fails. */
+status_t QCameraHardwareInterface::autoFocus()
+{
+ ALOGV("autoFocus: E");
+ status_t ret = NO_ERROR;
+ /* Evaluated before taking mLock; result is consumed later by takePicture. */
+ mFlashCond = getFlashCondition();
+ ALOGV("autoFocus: mFlashCond = %d", mFlashCond);
+ Mutex::Autolock lock(mLock);
+ ALOGV("autoFocus: Got lock");
+ bool status = true;
+ isp3a_af_mode_t afMode = getAutoFocusMode(mParameters);
+
+ if(mAutoFocusRunning==true){
+ ALOGV("%s:AF already running should not have got this call",__func__);
+ return NO_ERROR;
+ }
+
+ if (afMode == AF_MODE_MAX) {
+ /* This should never happen. We cannot send a
+ * callback notifying error from this place because
+ * the CameraService has called this function after
+ * acquiring the lock. So if we try to issue a callback
+ * from this place, the callback will try to acquire
+ * the same lock in CameraService and it will result
+ * in deadlock. So, let the call go in to the lower
+ * layer. The lower layer will anyway return error if
+ * the autofocus is not supported or if the focus
+ * value is invalid.
+ * Just print out the error. */
+ ALOGE("%s:Invalid AF mode (%d)", __func__, afMode);
+ }
+
+ ALOGV("%s:AF start (mode %d)", __func__, afMode);
+ if(MM_CAMERA_OK != cam_ops_action(mCameraId, true,
+ MM_CAMERA_OPS_FOCUS, &afMode)) {
+ ALOGE("%s: AF command failed err:%d error %s",
+ __func__, errno, strerror(errno));
+ return UNKNOWN_ERROR;
+ }
+
+ mAutoFocusRunning = true;
+ ALOGV("autoFocus: X");
+ return ret;
+}
+
+/* Cancel a running auto-focus cycle (or unlock CAF that was locked by a
+ * previous AF call). A no-op when AF is not running and CAF needs no
+ * unlock. The cancel command's failure is logged but not propagated;
+ * always returns NO_ERROR. */
+status_t QCameraHardwareInterface::cancelAutoFocus()
+{
+ ALOGV("cancelAutoFocus: E");
+ status_t ret = NO_ERROR;
+ Mutex::Autolock lock(mLock);
+
+/**************************************************************
+ BEGIN MUTEX CODE
+*************************************************************/
+
+ mAutofocusLock.lock();
+ if(mAutoFocusRunning || mNeedToUnlockCaf) {
+ mNeedToUnlockCaf = false;
+ mAutoFocusRunning = false;
+ mAutofocusLock.unlock();
+
+ }else/*(!mAutoFocusRunning)*/{
+
+ mAutofocusLock.unlock();
+ ALOGV("%s:Af not running",__func__);
+ return NO_ERROR;
+ }
+/**************************************************************
+ END MUTEX CODE
+*************************************************************/
+
+
+ if(MM_CAMERA_OK!=cam_ops_action(mCameraId,false,MM_CAMERA_OPS_FOCUS,NULL )) {
+ ALOGE("%s: AF command failed err:%d error %s",__func__, errno,strerror(errno));
+ }
+
+ ALOGV("cancelAutoFocus: X");
+ return NO_ERROR;
+}
+
+#if 0 //mzhu
+/*==========================================================================
+ * FUNCTION - prepareSnapshotAndWait -
+ *
+ * DESCRIPTION: invoke preparesnapshot and wait for it done
+ it can be called within takepicture, so no need
+ to grab mLock.
+ *
+ * NOTE: dead code (compiled out); kept for reference. The cond-wait below
+ * is unblocked by processprepareSnapshotEvent().
+ *=========================================================================*/
+void QCameraHardwareInterface::prepareSnapshotAndWait()
+{
+ ALOGI("prepareSnapshotAndWait: E");
+ int rc = 0;
+ /*To Do: call mm camera preparesnapshot */
+ if(!rc ) {
+ mPreparingSnapshot = true;
+ pthread_mutex_lock(&mAsyncCmdMutex);
+ pthread_cond_wait(&mAsyncCmdWait, &mAsyncCmdMutex);
+ pthread_mutex_unlock(&mAsyncCmdMutex);
+ mPreparingSnapshot = false;
+ }
+ ALOGI("prepareSnapshotAndWait: X");
+}
+#endif //mzhu
+
+/*==========================================================================
+ * FUNCTION - processprepareSnapshotEvent -
+ *
+ * DESCRIPTION: Process the event of preparesnapshot done msg
+ unblock prepareSnapshotAndWait( )
+ *=========================================================================*/
+void QCameraHardwareInterface::processprepareSnapshotEvent(cam_ctrl_status_t *status)
+{
+ ALOGV("processprepareSnapshotEvent: E");
+ /* Signal whoever is blocked in pthread_cond_wait on mAsyncCmdWait. */
+ pthread_mutex_lock(&mAsyncCmdMutex);
+ pthread_cond_signal(&mAsyncCmdWait);
+ pthread_mutex_unlock(&mAsyncCmdMutex);
+ ALOGV("processprepareSnapshotEvent: X");
+}
+
+/* Forward a face-detection ROI event to the preview stream. The #if 0
+ * section is the legacy path that packed ROI rectangles into a metadata
+ * heap and issued CAMERA_MSG_META_DATA directly; it is compiled out. */
+void QCameraHardwareInterface::roiEvent(fd_roi_t roi,app_notify_cb_t *app_cb)
+{
+ ALOGV("roiEvent: E");
+
+ if(mStreamDisplay) mStreamDisplay->notifyROIEvent(roi);
+#if 0 //TODO: move to preview obj
+ mCallbackLock.lock();
+ data_callback mcb = mDataCb;
+ void *mdata = mCallbackCookie;
+ int msgEnabled = mMsgEnabled;
+ mCallbackLock.unlock();
+
+ mMetaDataWaitLock.lock();
+ if (mFaceDetectOn == true && mSendMetaData == true) {
+ mSendMetaData = false;
+ int faces_detected = roi.rect_num;
+ int max_faces_detected = MAX_ROI * 4;
+ int array[max_faces_detected + 1];
+
+ array[0] = faces_detected * 4;
+ for (int i = 1, j = 0;j < MAX_ROI; j++, i = i + 4) {
+ if (j < faces_detected) {
+ array[i] = roi.faces[j].x;
+ array[i+1] = roi.faces[j].y;
+ array[i+2] = roi.faces[j].dx;
+ array[i+3] = roi.faces[j].dy;
+ } else {
+ array[i] = -1;
+ array[i+1] = -1;
+ array[i+2] = -1;
+ array[i+3] = -1;
+ }
+ }
+ if(mMetaDataHeap != NULL){
+ ALOGV("mMetaDataHEap is non-NULL");
+ memcpy((uint32_t *)mMetaDataHeap->mHeap->base(), (uint32_t *)array, (sizeof(int)*(MAX_ROI*4+1)));
+ mMetaDataWaitLock.unlock();
+
+ if (mcb != NULL && (msgEnabled & CAMERA_MSG_META_DATA)) {
+ mcb(CAMERA_MSG_META_DATA, mMetaDataHeap->mBuffers[0], mdata);
+ }
+ } else {
+ mMetaDataWaitLock.unlock();
+ ALOGE("runPreviewThread mMetaDataHeap is NULL");
+ }
+ } else {
+ mMetaDataWaitLock.unlock();
+ }
+#endif // mzhu
+ ALOGV("roiEvent: X");
+}
+
+
+/* On a zoom change, fetch the snapshot-channel crop rectangles from the
+ * lower layer and copy them into the active snapshot stream(s) (main
+ * snapshot, and liveshot when full-size liveshot is enabled). */
+void QCameraHardwareInterface::handleZoomEventForSnapshot(void)
+{
+ mm_camera_ch_crop_t v4l2_crop;
+
+
+ ALOGV("%s: E", __func__);
+
+ memset(&v4l2_crop,0,sizeof(v4l2_crop));
+ v4l2_crop.ch_type=MM_CAMERA_CH_SNAPSHOT;
+
+ ALOGV("%s: Fetching crop info", __func__);
+ cam_config_get_parm(mCameraId,MM_CAMERA_PARM_CROP,&v4l2_crop);
+
+ ALOGV("%s: Crop info received for main: %d, %d, %d, %d ", __func__,
+ v4l2_crop.snapshot.main_crop.left,
+ v4l2_crop.snapshot.main_crop.top,
+ v4l2_crop.snapshot.main_crop.width,
+ v4l2_crop.snapshot.main_crop.height);
+ ALOGV("%s: Crop info received for thumbnail: %d, %d, %d, %d ",__func__,
+ v4l2_crop.snapshot.thumbnail_crop.left,
+ v4l2_crop.snapshot.thumbnail_crop.top,
+ v4l2_crop.snapshot.thumbnail_crop.width,
+ v4l2_crop.snapshot.thumbnail_crop.height);
+
+ if(mStreamSnap) {
+ ALOGV("%s: Setting crop info for snapshot", __func__);
+ memcpy(&(mStreamSnap->mCrop), &v4l2_crop, sizeof(v4l2_crop));
+ }
+ if(mFullLiveshotEnabled && mStreamLiveSnap){
+ ALOGV("%s: Setting crop info for snapshot", __func__);
+ memcpy(&(mStreamLiveSnap->mCrop), &v4l2_crop, sizeof(v4l2_crop));
+ }
+ ALOGV("%s: X", __func__);
+}
+
+/* On a zoom change during (non-smooth) zoom, fetch the preview-channel
+ * crop rectangle and apply it to the preview window. Skipped while smooth
+ * zoom is running or when no preview window is set. app_cb is unused here. */
+void QCameraHardwareInterface::handleZoomEventForPreview(app_notify_cb_t *app_cb)
+{
+ mm_camera_ch_crop_t v4l2_crop;
+
+ ALOGV("%s: E", __func__);
+
+ /*regular zooming or smooth zoom stopped*/
+ if (!mSmoothZoomRunning && mPreviewWindow) {
+ memset(&v4l2_crop, 0, sizeof(v4l2_crop));
+ v4l2_crop.ch_type = MM_CAMERA_CH_PREVIEW;
+
+ ALOGV("%s: Fetching crop info", __func__);
+ cam_config_get_parm(mCameraId,MM_CAMERA_PARM_CROP,&v4l2_crop);
+
+ ALOGV("%s: Crop info received: %d, %d, %d, %d ", __func__,
+ v4l2_crop.crop.left,
+ v4l2_crop.crop.top,
+ v4l2_crop.crop.width,
+ v4l2_crop.crop.height);
+
+ /* set_crop takes (left, top, right, bottom); convert width/height. */
+ mPreviewWindow->set_crop(mPreviewWindow,
+ v4l2_crop.crop.left,
+ v4l2_crop.crop.top,
+ v4l2_crop.crop.left + v4l2_crop.crop.width,
+ v4l2_crop.crop.top + v4l2_crop.crop.height);
+ ALOGV("%s: Done setting crop", __func__);
+ ALOGV("%s: Currrent zoom :%d",__func__, mCurrentZoom);
+ }
+
+ ALOGV("%s: X", __func__);
+}
+
+/* Dispatch a zoom-changed event to the preview and/or snapshot crop
+ * handlers depending on the current preview state (ZSL and full-size
+ * liveshot also need the snapshot channel's crop updated). */
+void QCameraHardwareInterface::zoomEvent(cam_ctrl_status_t *status, app_notify_cb_t *app_cb)
+{
+ ALOGV("zoomEvent: state:%d E",mPreviewState);
+ switch (mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ break;
+ case QCAMERA_HAL_PREVIEW_START:
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ handleZoomEventForPreview(app_cb);
+ if (isZSLMode())
+ handleZoomEventForSnapshot();
+ break;
+ case QCAMERA_HAL_RECORDING_STARTED:
+ handleZoomEventForPreview(app_cb);
+ if (mFullLiveshotEnabled)
+ handleZoomEventForSnapshot();
+ break;
+ case QCAMERA_HAL_TAKE_PICTURE:
+ if(isZSLMode())
+ handleZoomEventForPreview(app_cb);
+ handleZoomEventForSnapshot();
+ break;
+ default:
+ break;
+ }
+ ALOGV("zoomEvent: X");
+}
+
+/* Debug helper: dump a raw buffer to /data/<name>_<index>.<ext> when the
+ * persist.camera.dumpimage property is set. Entire body is compiled out
+ * (#if 0); currently a no-op. */
+void QCameraHardwareInterface::dumpFrameToFile(const void * data, uint32_t size, char* name, char* ext, int index)
+{
+#if 0
+ char buf[32], value[PROPERTY_VALUE_MAX];
+ int file_fd, enabled = 0;
+ static int i = 0 ;
+ property_get("persist.camera.dumpimage", value, "0");
+ enabled = atoi(value);
+
+ if ( data != NULL && enabled) {
+ char * str;
+ snprintf(buf, sizeof(buf), "/data/%s_%d.%s", name, index, ext);
+ ALOGE("%s size =%d", buf, size);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ write(file_fd, data, size);
+ close(file_fd);
+ i++;
+ }
+#endif
+}
+
+/* Debug helper: dump preview/video/main/thumbnail frames as YUV files under
+ * /data, controlled by the persist.camera.dumpimg property (bit mask, frame
+ * count and skip factor packed into one integer). Entire body is compiled
+ * out (#if 0); currently a no-op. */
+void QCameraHardwareInterface::dumpFrameToFile(struct msm_frame* newFrame,
+ HAL_cam_dump_frm_type_t frm_type)
+{
+#if 0
+ int32_t enabled = 0;
+ int frm_num;
+ uint32_t skip_mode;
+ char value[PROPERTY_VALUE_MAX];
+ char buf[32];
+ int main_422 = 1;
+ property_get("persist.camera.dumpimg", value, "0");
+ enabled = atoi(value);
+
+ ALOGV(" newFrame =%p, frm_type = %d", newFrame, frm_type);
+ if(enabled & HAL_DUMP_FRM_MASK_ALL) {
+ if((enabled & frm_type) && newFrame) {
+ /* property layout: bits 31..16 = frame count, 15..8 = skip factor,
+ * low bits = frame-type mask. */
+ frm_num = ((enabled & 0xffff0000) >> 16);
+ if(frm_num == 0) frm_num = 10; /*default 10 frames*/
+ if(frm_num > 256) frm_num = 256; /*256 buffers cycle around*/
+ skip_mode = ((enabled & 0x0000ff00) >> 8);
+ if(skip_mode == 0) skip_mode = 1; /*no -skip */
+
+ if( mDumpSkipCnt % skip_mode == 0) {
+ if (mDumpFrmCnt >= 0 && mDumpFrmCnt <= frm_num) {
+ int w, h;
+ int file_fd;
+ switch (frm_type) {
+ case HAL_DUMP_FRM_PREVIEW:
+ w = mDimension.display_width;
+ h = mDimension.display_height;
+ snprintf(buf, sizeof(buf), "/data/%dp_%dx%d.yuv", mDumpFrmCnt, w, h);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ break;
+ case HAL_DUMP_FRM_VIDEO:
+ w = mVideoWidth;
+ h = mVideoHeight;
+ snprintf(buf, sizeof(buf),"/data/%dv_%dx%d.yuv", mDumpFrmCnt, w, h);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ break;
+ case HAL_DUMP_FRM_MAIN:
+ w = mDimension.picture_width;
+ h = mDimension.picture_height;
+ snprintf(buf, sizeof(buf), "/data/%dm_%dx%d.yuv", mDumpFrmCnt, w, h);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ if (mDimension.main_img_format == CAMERA_YUV_422_NV16 ||
+ mDimension.main_img_format == CAMERA_YUV_422_NV61)
+ main_422 = 2;
+ break;
+ case HAL_DUMP_FRM_THUMBNAIL:
+ w = mDimension.ui_thumbnail_width;
+ h = mDimension.ui_thumbnail_height;
+ snprintf(buf, sizeof(buf),"/data/%dt_%dx%d.yuv", mDumpFrmCnt, w, h);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ break;
+ default:
+ w = h = 0;
+ file_fd = -1;
+ break;
+ }
+
+ if (file_fd < 0) {
+ ALOGE("%s: cannot open file:type=%d\n", __func__, frm_type);
+ } else {
+ ALOGV("%s: %d %d", __func__, newFrame->y_off, newFrame->cbcr_off);
+ write(file_fd, (const void *)(newFrame->buffer+newFrame->y_off), w * h);
+ write(file_fd, (const void *)
+ (newFrame->buffer + newFrame->cbcr_off), w * h / 2 * main_422);
+ close(file_fd);
+ ALOGV("dump %s", buf);
+ }
+ } else if(frm_num == 256){
+ mDumpFrmCnt = 0;
+ }
+ mDumpFrmCnt++;
+ }
+ mDumpSkipCnt++;
+ }
+ } else {
+ mDumpFrmCnt = 0;
+ }
+#endif
+}
+
+/* Accept a new preview window from the framework. If preview start was
+ * deferred waiting for a surface (PREVIEW_START state), start it now;
+ * in stopped/take-picture states just hand the window to the display
+ * stream. Receiving a new window while preview is running is logged as a
+ * bug but tolerated. Returns NO_ERROR or UNKNOWN_ERROR. */
+status_t QCameraHardwareInterface::setPreviewWindow(preview_stream_ops_t* window)
+{
+ status_t retVal = NO_ERROR;
+ ALOGV(" %s: E mPreviewState = %d, mStreamDisplay = %p", __FUNCTION__, mPreviewState, mStreamDisplay);
+ if( window == NULL) {
+ ALOGE("%s:Received Setting NULL preview window", __func__);
+ }
+ Mutex::Autolock lock(mLock);
+ switch(mPreviewState) {
+ case QCAMERA_HAL_PREVIEW_START:
+ mPreviewWindow = window;
+ if(mPreviewWindow) {
+ /* we have valid surface now, start preview */
+ retVal = startPreview2();
+ if(retVal == NO_ERROR)
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ ALOGV("%s: startPreview2 done, mPreviewState = %d", __func__, mPreviewState);
+ } else
+ ALOGE("%s: null window received, mPreviewState = %d", __func__, mPreviewState);
+ break;
+ case QCAMERA_HAL_PREVIEW_STARTED:
+ /* new window comes */
+ ALOGE("%s: bug, cannot handle new window in started state", __func__);
+ //retVal = UNKNOWN_ERROR;
+ break;
+ case QCAMERA_HAL_PREVIEW_STOPPED:
+ case QCAMERA_HAL_TAKE_PICTURE:
+ mPreviewWindow = window;
+ ALOGE("%s: mPreviewWindow = 0x%p, mStreamDisplay = 0x%p",
+ __func__, mPreviewWindow, mStreamDisplay);
+ if(mStreamDisplay)
+ retVal = mStreamDisplay->setPreviewWindow(window);
+ break;
+ default:
+ ALOGE("%s: bug, cannot handle new window in state %d", __func__, mPreviewState);
+ retVal = UNKNOWN_ERROR;
+ break;
+ }
+ ALOGV(" %s : X, mPreviewState = %d", __FUNCTION__, mPreviewState);
+ return retVal;
+}
+
+/* Record the framework's request to store metadata in video buffers.
+ * Stub: the flag is remembered but not otherwise acted on here. */
+int QCameraHardwareInterface::storeMetaDataInBuffers(int enable)
+{
+ /* this is a dummy func now. fix me later */
+ mStoreMetaDataInFrame = enable;
+ return 0;
+}
+
+/* Send a buffer-mapping message (ext_mode/frame index/fd/size) to the
+ * camera daemon over the control socket so it can mmap the buffer.
+ * Returns NO_ERROR on success, FAILED_TRANSACTION if sendmsg fails. */
+status_t QCameraHardwareInterface::sendMappingBuf(int ext_mode, int idx, int fd,
+ uint32_t size, int cameraid,
+ mm_camera_socket_msg_type msg_type)
+{
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = msg_type;
+ packet.payload.frame_fd_map.ext_mode = ext_mode;
+ packet.payload.frame_fd_map.frame_idx = idx;
+ packet.payload.frame_fd_map.fd = fd;
+ packet.payload.frame_fd_map.size = size;
+
+ /* The fd is passed as ancillary data so the daemon gets its own handle. */
+ if ( cam_ops_sendmsg(cameraid, &packet, sizeof(cam_sock_packet_t), packet.payload.frame_fd_map.fd) <= 0 ) {
+ ALOGE("%s: sending frame mapping buf msg Failed", __func__);
+ return FAILED_TRANSACTION;
+ }
+ return NO_ERROR;
+}
+
+/* Send a buffer-unmapping message for (ext_mode, frame index) to the
+ * camera daemon; no fd accompanies the message. Returns NO_ERROR on
+ * success, FAILED_TRANSACTION if sendmsg fails. */
+status_t QCameraHardwareInterface::sendUnMappingBuf(int ext_mode, int idx, int cameraid,
+ mm_camera_socket_msg_type msg_type)
+{
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = msg_type;
+ packet.payload.frame_fd_unmap.ext_mode = ext_mode;
+ packet.payload.frame_fd_unmap.frame_idx = idx;
+ if ( cam_ops_sendmsg(cameraid, &packet, sizeof(cam_sock_packet_t), 0) <= 0 ) {
+ ALOGE("%s: sending frame unmapping buf msg Failed", __func__);
+ return FAILED_TRANSACTION;
+ }
+ return NO_ERROR;
+}
+
+/* Allocate one ION buffer for slot 'cnt' of the heap: open /dev/ion,
+ * allocate p_camera_memory->size bytes (page-aligned, cached) from the
+ * heaps in 'ion_type' (a heap mask), and share it to get a buffer fd in
+ * p_camera_memory->fd[cnt]. Returns 0 on success, -1 on failure with
+ * partial work undone via the goto cleanup ladder. */
+int QCameraHardwareInterface::allocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt, int ion_type)
+{
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ p_camera_memory->main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+ if (p_camera_memory->main_ion_fd[cnt] < 0) {
+ ALOGE("Ion dev open failed\n");
+ ALOGE("Error is %s\n", strerror(errno));
+ goto ION_OPEN_FAILED;
+ }
+ p_camera_memory->alloc[cnt].len = p_camera_memory->size;
+ /* to make it page size aligned */
+ p_camera_memory->alloc[cnt].len = (p_camera_memory->alloc[cnt].len + 4095) & (~4095);
+ p_camera_memory->alloc[cnt].align = 4096;
+ p_camera_memory->alloc[cnt].flags = ION_FLAG_CACHED;
+ p_camera_memory->alloc[cnt].heap_mask = ion_type;
+
+ rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_ALLOC, &p_camera_memory->alloc[cnt]);
+ if (rc < 0) {
+ ALOGE("ION allocation failed\n");
+ goto ION_ALLOC_FAILED;
+ }
+
+ p_camera_memory->ion_info_fd[cnt].handle = p_camera_memory->alloc[cnt].handle;
+ rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_SHARE, &p_camera_memory->ion_info_fd[cnt]);
+ if (rc < 0) {
+ ALOGE("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ p_camera_memory->fd[cnt] = p_camera_memory->ion_info_fd[cnt].fd;
+ return 0;
+
+ION_MAP_FAILED:
+ handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+ ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ close(p_camera_memory->main_ion_fd[cnt]);
+ p_camera_memory->main_ion_fd[cnt] = -1;
+ION_OPEN_FAILED:
+ return -1;
+}
+
+/* Release the ION allocation for heap slot 'cnt': free the handle and
+ * close the ION device fd. No-op when the slot was never allocated
+ * (main_ion_fd <= 0). Always returns 0. */
+int QCameraHardwareInterface::deallocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt)
+{
+ struct ion_handle_data handle_data;
+ int rc = 0;
+
+ if (p_camera_memory->main_ion_fd[cnt] > 0) {
+ handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+ ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ close(p_camera_memory->main_ion_fd[cnt]);
+ p_camera_memory->main_ion_fd[cnt] = -1;
+ }
+ return rc;
+}
+
+/* Stat-heap variant of allocate_ion_memory. Same flow as the
+ * QCameraHalHeap_t overload, except 'ion_type' is a heap ID here (not a
+ * mask) and the IOMMU heap is always added as a fallback in the mask.
+ * Returns 0 on success, -1 on failure. */
+int QCameraHardwareInterface::allocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt, int ion_type)
+{
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ p_camera_memory->main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+ if (p_camera_memory->main_ion_fd[cnt] < 0) {
+ ALOGE("Ion dev open failed\n");
+ ALOGE("Error is %s\n", strerror(errno));
+ goto ION_OPEN_FAILED;
+ }
+ p_camera_memory->alloc[cnt].len = p_camera_memory->size;
+ /* to make it page size aligned */
+ p_camera_memory->alloc[cnt].len = (p_camera_memory->alloc[cnt].len + 4095) & (~4095);
+ p_camera_memory->alloc[cnt].align = 4096;
+ p_camera_memory->alloc[cnt].flags = ION_FLAG_CACHED;
+ p_camera_memory->alloc[cnt].heap_mask = (0x1 << ion_type | 0x1 << ION_IOMMU_HEAP_ID);
+
+ rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_ALLOC, &p_camera_memory->alloc[cnt]);
+ if (rc < 0) {
+ ALOGE("ION allocation failed\n");
+ goto ION_ALLOC_FAILED;
+ }
+
+ p_camera_memory->ion_info_fd[cnt].handle = p_camera_memory->alloc[cnt].handle;
+ rc = ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_SHARE, &p_camera_memory->ion_info_fd[cnt]);
+ if (rc < 0) {
+ ALOGE("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ p_camera_memory->fd[cnt] = p_camera_memory->ion_info_fd[cnt].fd;
+ return 0;
+
+ION_MAP_FAILED:
+ handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+ ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ close(p_camera_memory->main_ion_fd[cnt]);
+ p_camera_memory->main_ion_fd[cnt] = -1;
+ION_OPEN_FAILED:
+ return -1;
+}
+
+/* Issue an ION custom cache-maintenance ioctl on a buffer.
+ * ion_fd     - ION device/client fd owning the buffer
+ * cache_data - describes the buffer range to clean/invalidate
+ * type       - ION custom command selecting the cache operation
+ * Returns the ioctl result: 0 on success, negative on failure. */
+int QCameraHardwareInterface::cache_ops(int ion_fd,
+ struct ion_flush_data *cache_data, int type)
+{
+ int rc = 0;
+ struct ion_custom_data data;
+ data.cmd = type;
+ data.arg = (unsigned long)cache_data;
+
+ rc = ioctl(ion_fd, ION_IOC_CUSTOM, &data);
+ if (rc < 0)
+ ALOGE("%s: Cache Invalidate failed\n", __func__);
+ else
+ /* Fix: format string has two conversions but only __func__ was
+ * passed, leaving %d reading an unspecified argument. */
+ ALOGV("%s: Cache OPs type(%d) success", __func__, type);
+
+ return rc;
+}
+
+/* Stat-heap variant: release the ION allocation for slot 'cnt' (free the
+ * handle, close the ION device fd). No-op when the slot was never
+ * allocated. Always returns 0. */
+int QCameraHardwareInterface::deallocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt)
+{
+ struct ion_handle_data handle_data;
+ int rc = 0;
+
+ if (p_camera_memory->main_ion_fd[cnt] > 0) {
+ handle_data.handle = p_camera_memory->ion_info_fd[cnt].handle;
+ ioctl(p_camera_memory->main_ion_fd[cnt], ION_IOC_FREE, &handle_data);
+ close(p_camera_memory->main_ion_fd[cnt]);
+ p_camera_memory->main_ion_fd[cnt] = -1;
+ }
+ return rc;
+}
+
+/* Allocate and wire up a heap of num_of_buf camera buffers:
+ * - allocates each buffer via ION (or pmem_adsp when USE_ION is off)
+ * - wraps each fd with the framework's camera_memory allocator (mGetMemory)
+ * - when StreamBuf/buf_def are given, fills the msm_frame and the
+ *   multi-plane v4l2 plane table for each buffer
+ * Returns 0 on success; on any failure releases everything already
+ * allocated via releaseHeapMem() and returns -1. */
+int QCameraHardwareInterface::initHeapMem( QCameraHalHeap_t *heap,
+ int num_of_buf,
+ int buf_len,
+ int y_off,
+ int cbcr_off,
+ int pmem_type,
+ mm_cameara_stream_buf_t *StreamBuf,
+ mm_camera_buf_def_t *buf_def,
+ uint8_t num_planes,
+ uint32_t *planes
+)
+{
+ int rc = 0;
+ int i;
+ int path;
+ struct msm_frame *frame;
+ ALOGV("Init Heap =%p. stream_buf =%p, pmem_type =%d, num_of_buf=%d. buf_len=%d, cbcr_off=%d",
+ heap, StreamBuf, pmem_type, num_of_buf, buf_len, cbcr_off);
+ if(num_of_buf > MM_CAMERA_MAX_NUM_FRAMES || heap == NULL ||
+ mGetMemory == NULL ) {
+ ALOGE("Init Heap error");
+ rc = -1;
+ return rc;
+ }
+ /* Start from a clean slate; mark all fds invalid before allocating. */
+ memset(heap, 0, sizeof(QCameraHalHeap_t));
+ for (i=0; i<MM_CAMERA_MAX_NUM_FRAMES;i++) {
+ heap->main_ion_fd[i] = -1;
+ heap->fd[i] = -1;
+ }
+ heap->buffer_count = num_of_buf;
+ heap->size = buf_len;
+ heap->y_offset = y_off;
+ heap->cbcr_offset = cbcr_off;
+
+ if (StreamBuf != NULL) {
+ StreamBuf->num = num_of_buf;
+ StreamBuf->frame_len = buf_len;
+ /* Map pmem type to the VFE output path tag carried by each frame. */
+ switch (pmem_type) {
+ case MSM_PMEM_MAINIMG:
+ case MSM_PMEM_RAW_MAINIMG:
+ path = OUTPUT_TYPE_S;
+ break;
+
+ case MSM_PMEM_THUMBNAIL:
+ path = OUTPUT_TYPE_T;
+ break;
+
+ default:
+ rc = -1;
+ return rc;
+ }
+ }
+
+
+ for(i = 0; i < num_of_buf; i++) {
+#ifdef USE_ION
+ if (isZSLMode())
+ rc = allocate_ion_memory(heap, i, ((0x1 << CAMERA_ZSL_ION_HEAP_ID) |
+ (0x1 << CAMERA_ZSL_ION_FALLBACK_HEAP_ID)));
+ else
+ rc = allocate_ion_memory(heap, i, ((0x1 << CAMERA_ION_HEAP_ID) |
+ (0x1 << CAMERA_ION_FALLBACK_HEAP_ID)));
+
+ if (rc < 0) {
+ ALOGE("%s: ION allocation failed\n", __func__);
+ break;
+ }
+#else
+ if (pmem_type == MSM_PMEM_MAX)
+ heap->fd[i] = -1;
+ else {
+ heap->fd[i] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+ if ( heap->fd[i] <= 0) {
+ rc = -1;
+ ALOGE("Open fail: heap->fd[%d] =%d", i, heap->fd[i]);
+ break;
+ }
+ }
+#endif
+ heap->camera_memory[i] = mGetMemory( heap->fd[i], buf_len, 1, (void *)this);
+
+ if (heap->camera_memory[i] == NULL ) {
+ ALOGE("Getmem fail %d: ", i);
+ rc = -1;
+ break;
+ }
+ if (StreamBuf != NULL) {
+ frame = &(StreamBuf->frame[i]);
+ memset(frame, 0, sizeof(struct msm_frame));
+ frame->fd = heap->fd[i];
+ frame->phy_offset = 0;
+ frame->buffer = (uint32_t) heap->camera_memory[i]->data;
+ frame->path = path;
+ frame->cbcr_off = planes[0]+heap->cbcr_offset;
+ frame->y_off = heap->y_offset;
+ frame->fd_data = heap->ion_info_fd[i];
+ frame->ion_alloc = heap->alloc[i];
+ frame->ion_dev_fd = heap->main_ion_fd[i];
+ ALOGV("%s: Buffer idx: %d addr: %x fd: %d phy_offset: %d"
+ "cbcr_off: %d y_off: %d frame_len: %d", __func__,
+ i, (unsigned int)frame->buffer, frame->fd,
+ frame->phy_offset, cbcr_off, y_off, frame->ion_alloc.len);
+
+ buf_def->buf.mp[i].frame = *frame;
+ buf_def->buf.mp[i].frame_offset = 0;
+ buf_def->buf.mp[i].num_planes = num_planes;
+ /* Plane 0 needs to be set seperately. Set other planes
+ * in a loop. */
+ buf_def->buf.mp[i].planes[0].length = planes[0];
+ buf_def->buf.mp[i].planes[0].m.userptr = frame->fd;
+ buf_def->buf.mp[i].planes[0].data_offset = y_off;
+ buf_def->buf.mp[i].planes[0].reserved[0] =
+ buf_def->buf.mp[i].frame_offset;
+ for (int j = 1; j < num_planes; j++) {
+ buf_def->buf.mp[i].planes[j].length = planes[j];
+ buf_def->buf.mp[i].planes[j].m.userptr = frame->fd;
+ buf_def->buf.mp[i].planes[j].data_offset = cbcr_off;
+ /* Each plane starts where the previous one ended. */
+ buf_def->buf.mp[i].planes[j].reserved[0] =
+ buf_def->buf.mp[i].planes[j-1].reserved[0] +
+ buf_def->buf.mp[i].planes[j-1].length;
+ }
+ } else {
+ }
+
+ ALOGV("heap->fd[%d] =%d, camera_memory=%p", i, heap->fd[i], heap->camera_memory[i]);
+ heap->local_flag[i] = 1;
+ }
+ if( rc < 0) {
+ releaseHeapMem(heap);
+ }
+ return rc;
+}
+
+/* Release everything initHeapMem allocated for 'heap': the framework
+ * camera_memory wrappers, the buffer fds, and (under USE_ION) the ION
+ * allocations. Safe to call on a partially-initialized heap; resets the
+ * heap's bookkeeping fields. Always returns 0. */
+int QCameraHardwareInterface::releaseHeapMem( QCameraHalHeap_t *heap)
+{
+ int rc = 0;
+ ALOGV("Release %p", heap);
+ if (heap != NULL) {
+
+ for (int i = 0; i < heap->buffer_count; i++) {
+ if(heap->camera_memory[i] != NULL) {
+ heap->camera_memory[i]->release( heap->camera_memory[i] );
+ heap->camera_memory[i] = NULL;
+ } else if (heap->fd[i] <= 0) {
+ /* Fix: log message said "amera_memory" (truncated). */
+ ALOGE("impossible: camera_memory[%d] = %p, fd = %d",
+ i, heap->camera_memory[i], heap->fd[i]);
+ }
+
+ if(heap->fd[i] > 0) {
+ close(heap->fd[i]);
+ heap->fd[i] = -1;
+ }
+#ifdef USE_ION
+ deallocate_ion_memory(heap, i);
+#endif
+ }
+ heap->buffer_count = 0;
+ heap->size = 0;
+ heap->y_offset = 0;
+ heap->cbcr_offset = 0;
+ }
+ return rc;
+}
+
+/* Accessor: return the cached preview format info by value. */
+preview_format_info_t QCameraHardwareInterface::getPreviewFormatInfo( )
+{
+ return mPreviewFormatInfo;
+}
+
+/* Forward a wavelet-denoise completion event to the snapshot stream,
+ * if one exists. */
+void QCameraHardwareInterface::wdenoiseEvent(cam_ctrl_status_t status, void *cookie)
+{
+ ALOGV("wdnEvent: preview state:%d E",mPreviewState);
+ if (mStreamSnap != NULL) {
+ ALOGV("notifyWDNEvent to snapshot stream");
+ mStreamSnap->notifyWDenoiseEvent(status, cookie);
+ }
+}
+
+/* True when wavelet denoise is enabled.  mDenoiseValue is an int; any
+ * non-zero value converts to true. */
+bool QCameraHardwareInterface::isWDenoiseEnabled()
+{
+ return mDenoiseValue;
+}
+
+/* Ask the camera engine to prepare for snapshot
+ * (MM_CAMERA_OPS_PREPARE_SNAPSHOT) ahead of takePicture.
+ * The return value of cam_ops_action() is ignored here. */
+void QCameraHardwareInterface::takePicturePrepareHardware()
+{
+ ALOGV("%s: E", __func__);
+
+ /* Prepare snapshot*/
+ cam_ops_action(mCameraId,
+ true,
+ MM_CAMERA_OPS_PREPARE_SNAPSHOT,
+ this);
+ ALOGV("%s: X", __func__);
+}
+
+/* True when preview runs without a display surface (mNoDisplayMode != 0). */
+bool QCameraHardwareInterface::isNoDisplayMode()
+{
+ return (mNoDisplayMode != 0);
+}
+
+/* ZSL path: if a parameter change flagged mRestartPreview, tear the
+ * preview down and immediately bring it back up, tracking mPreviewState
+ * across the transition.  NOTE(review): mRestartPreview is not cleared
+ * here -- confirm whether stopPreviewInternal()/startPreview2() resets it. */
+void QCameraHardwareInterface::pausePreviewForZSL()
+{
+ ALOGV("%s: mRestartPreview %d", __func__, mRestartPreview);
+ if(mRestartPreview) {
+ ALOGE("%s: Restarting Preview",__func__);
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+ startPreview2();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ }
+}
+
+/* Called when entering video/recording mode.  Decides whether the running
+ * preview must be restarted (recording hint not yet set, video size
+ * changed, or preview larger than video); if so, stops the preview,
+ * updates the recording hint and mDimension, and starts it again. */
+void QCameraHardwareInterface::pausePreviewForVideo()
+{
+ status_t ret = NO_ERROR;
+ bool restart = false;
+ cam_ctrl_dimension_t dim;
+
+ /* get existing preview information, by querying mm_camera */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: X error- can't get preview dimension!", __func__);
+ return;
+ }
+
+ if (mRestartPreview) {
+ mRestartPreview = false;
+ ALOGV("%s: Restarting Preview. Video Size changed",__func__);
+ /* NOTE(review): `restart |= false` is a no-op, so this branch clears
+ * the flag and logs "Restarting" without actually forcing a restart;
+ * it looks like it was meant to be `restart |= true` -- confirm
+ * against upstream before changing, since the size check below may
+ * be what actually triggers the restart. */
+ restart |= false;
+ }
+ if (mRecordingHint == false) {
+ ALOGV("%s: Restarting Preview. Hint not set",__func__);
+ restart |= true;
+ }
+
+ if(dim.video_width != mVideoWidth || dim.video_height != mVideoHeight){
+ ALOGV("%s: Restarting Preview. New Video Size set",__func__);
+ restart |= true;
+ }
+
+ //VFE output1 shouldn't be greater than VFE output2.
+ if( (mPreviewWidth > mVideoWidth) || (mPreviewHeight > mVideoHeight)) {
+ //Set preview sizes as record sizes.
+ ALOGV("Preview size %dx%d is greater than record size %dx%d,\
+ resetting preview size to record size",mPreviewWidth,
+ mPreviewHeight, mVideoWidth, mVideoHeight);
+ mPreviewWidth = mVideoWidth;
+ mPreviewHeight = mVideoHeight;
+ mParameters.setPreviewSize(mPreviewWidth, mPreviewHeight);
+ restart |= true;
+ }
+ if (restart) {
+ stopPreviewInternal();
+ mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+
+ // Set recording hint to true
+ mRecordingHint = true;
+ setRecordingHintValue(mRecordingHint);
+
+ // Push preview/video dimensions to the shared mDimension state
+ // consumed on the next preview start.
+ mDimension.display_width = mPreviewWidth;
+ mDimension.display_height= mPreviewHeight;
+ mDimension.orig_video_width = mVideoWidth;
+ mDimension.orig_video_height = mVideoHeight;
+ mDimension.video_width = mVideoWidth;
+ mDimension.video_height = mVideoHeight;
+
+ // start preview again
+ mPreviewState = QCAMERA_HAL_PREVIEW_START;
+ if (startPreview2() == NO_ERROR){
+ mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+ }else{
+ // Preview is left stopped (state remains PREVIEW_START).
+ ALOGE("%s: Restart for Video Failed",__func__);
+ }
+ }
+}
+
+/* When HDR mode is active (and we are not recording), program AE exposure
+ * bracketing for the HDR capture.
+ *   max_num_frm - capacity of the caller's |exp| array (upper bound on
+ *                 exposure values written).
+ *   num_frame   - out: number of frames the capture will take (defaults
+ *                 to 1 on any early-out inside the HDR branch).
+ *   exp         - out: exposure values parsed from the comma-separated
+ *                 string returned by the MM_CAMERA_PARM_HDR query.
+ * Returns true only if bracketing was successfully programmed via
+ * native_set_parms(). */
+bool QCameraHardwareInterface::getHdrInfoAndSetExp( int max_num_frm, int *num_frame, int *exp)
+{
+ bool rc = false;
+
+ if (mHdrMode == HDR_MODE && num_frame != NULL && exp != NULL &&
+ mRecordingHint != true &&
+ mPreviewState != QCAMERA_HAL_RECORDING_STARTED ) {
+ int ret = 0;
+ *num_frame = 1;
+ exp_bracketing_t temp;
+ memset(&temp, 0, sizeof(exp_bracketing_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_HDR, (void *)&temp );
+ /* NOTE(review): elsewhere in this file cam_config_get_parm results are
+ * compared against MM_CAMERA_OK; here it is NO_ERROR -- assumed to be
+ * the same success value (0), confirm. */
+ if (ret == NO_ERROR && max_num_frm > 0) {
+ /*set as AE Bracketing mode*/
+ temp.hdr_enable = false;
+ temp.mode = HDR_MODE;
+ temp.total_hal_frames = temp.total_frames;
+ ret = native_set_parms(MM_CAMERA_PARM_HDR,
+ sizeof(exp_bracketing_t), (void *)&temp);
+ if (ret) {
+ /* Parse the comma-separated exposure list into exp[],
+ * capped at max_num_frm entries. */
+ char *val, *exp_value, *prev_value;
+ int i;
+ exp_value = (char *) temp.values;
+ i = 0;
+ val = strtok_r(exp_value,",", &prev_value);
+ while (val != NULL ){
+ exp[i++] = atoi(val);
+ if(i >= max_num_frm )
+ break;
+ val = strtok_r(NULL, ",", &prev_value);
+ }
+ *num_frame =temp.total_frames;
+ rc = true;
+ }
+ } else {
+ temp.total_frames = 1;
+ }
+ /* Application waits until this many snapshots before restarting preview */
+ /* NOTE(review): hardcoded 2 even when *num_frame differs -- confirm
+ * this is intentional. */
+ mParameters.set("num-snaps-per-shutter", 2);
+ }
+ return rc;
+}
+
+/* HDR capture event: |cookie| carries the QCameraStream that registered
+ * for the event.  The status is forwarded to that stream (with |cookie|
+ * passed through as well), but only while mStreamSnap is still alive. */
+void QCameraHardwareInterface::hdrEvent(cam_ctrl_status_t status, void *cookie)
+{
+ QCameraStream * snapStream = (QCameraStream *)cookie;
+ ALOGV("HdrEvent: preview state: E");
+ if (snapStream != NULL && mStreamSnap != NULL) {
+ ALOGV("HdrEvent to snapshot stream");
+ snapStream->notifyHdrEvent(status, cookie);
+ }
+}
+
+}; // namespace android
diff --git a/camera/QCameraHWI.h b/camera/QCameraHWI.h
new file mode 100644
index 0000000..3d47bf9
--- /dev/null
+++ b/camera/QCameraHWI.h
@@ -0,0 +1,854 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_HARDWARE_INTERFACE_H
+#define ANDROID_HARDWARE_QCAMERA_HARDWARE_INTERFACE_H
+
+
+#include <utils/threads.h>
+//#include <camera/CameraHardwareInterface.h>
+#include <hardware/camera.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+//#include <binder/MemoryHeapPmem.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <camera/Camera.h>
+#include "QCameraParameters.h"
+#include <system/window.h>
+#include <system/camera.h>
+#include <hardware/camera.h>
+#include <gralloc_priv.h>
+#include <QComOMXMetadata.h>
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/msm_ion.h>
+#include <mm_camera_interface2.h>
+#include "mm_omx_jpeg_encoder.h"
+} //extern C
+
+#include "QCameraHWI_Mem.h"
+#include "QCameraStream.h"
+#include "QCamera_Intf.h"
+
+#include "hdr/include/morpho_noise_reduction_ext.h"
+//Error codes
+#define NOT_FOUND -1
+#define MAX_ZOOM_RATIOS 62
+
+#ifdef Q12
+#undef Q12
+#endif
+
+#define Q12 4096
+#define QCAMERA_PARM_ENABLE 1
+#define QCAMERA_PARM_DISABLE 0
+#define PREVIEW_TBL_MAX_SIZE 14
+#define VIDEO_TBL_MAX_SIZE 14
+#define THUMB_TBL_MAX_SIZE 16
+#define HFR_TBL_MAX_SIZE 2
+
+struct str_map {
+ const char *const desc;
+ int val;
+};
+
+/* Describes the negotiated preview pixel format (see
+ * getPreviewFormatInfo() / mPreviewFormatInfo). */
+struct preview_format_info_t {
+ int Hal_format; // HAL-side pixel format constant
+ cam_format_t mm_cam_format; // mm-camera-side format constant
+ cam_pad_format_t padding; // padding/alignment requirement
+ int num_planar; // number of planes in the format
+};
+
+typedef enum {
+ CAMERA_STATE_UNINITED,
+ CAMERA_STATE_READY,
+ CAMERA_STATE_PREVIEW_START_CMD_SENT,
+ CAMERA_STATE_PREVIEW_STOP_CMD_SENT,
+ CAMERA_STATE_PREVIEW,
+ CAMERA_STATE_RECORD_START_CMD_SENT, /*5*/
+ CAMERA_STATE_RECORD_STOP_CMD_SENT,
+ CAMERA_STATE_RECORD,
+ CAMERA_STATE_SNAP_START_CMD_SENT,
+ CAMERA_STATE_SNAP_STOP_CMD_SENT,
+ CAMERA_STATE_SNAP_CMD_ACKED, /*10 - snapshot comd acked, snapshot not done yet*/
+ CAMERA_STATE_ZSL_START_CMD_SENT,
+ CAMERA_STATE_ZSL,
+ CAMERA_STATE_AF_START_CMD_SENT,
+ CAMERA_STATE_AF_STOP_CMD_SENT,
+ CAMERA_STATE_ERROR, /*15*/
+
+ /*Add any new state above*/
+ CAMERA_STATE_MAX
+} HAL_camera_state_type_t;
+
+enum {
+ BUFFER_NOT_OWNED,
+ BUFFER_UNLOCKED,
+ BUFFER_LOCKED,
+};
+
+typedef enum {
+ HAL_DUMP_FRM_PREVIEW = 1,
+ HAL_DUMP_FRM_VIDEO = 1<<1,
+ HAL_DUMP_FRM_MAIN = 1<<2,
+ HAL_DUMP_FRM_THUMBNAIL = 1<<3,
+
+ /*8 bits mask*/
+ HAL_DUMP_FRM_MAX = 1 << 8
+} HAL_cam_dump_frm_type_t;
+
+
+typedef enum {
+ HAL_CAM_MODE_ZSL = 1,
+
+ /*add new entry before and update the max entry*/
+ HAL_CAM_MODE_MAX = HAL_CAM_MODE_ZSL << 1,
+} qQamera_mode_t;
+
+#define HAL_DUMP_FRM_MASK_ALL ( HAL_DUMP_FRM_PREVIEW + HAL_DUMP_FRM_VIDEO + \
+ HAL_DUMP_FRM_MAIN + HAL_DUMP_FRM_THUMBNAIL)
+#define QCAMERA_HAL_PREVIEW_STOPPED 0
+#define QCAMERA_HAL_PREVIEW_START 1
+#define QCAMERA_HAL_PREVIEW_STARTED 2
+#define QCAMERA_HAL_RECORDING_STARTED 3
+#define QCAMERA_HAL_TAKE_PICTURE 4
+
+
+/* Preview memory for the display case: buffers are gralloc-backed and
+ * obtained from the preview window, so this struct only tracks the
+ * handles and per-slot state (see mPreviewMemory). */
+typedef struct {
+ int buffer_count;
+ buffer_handle_t *buffer_handle[MM_CAMERA_MAX_NUM_FRAMES];
+ struct private_handle_t *private_buffer_handle[MM_CAMERA_MAX_NUM_FRAMES];
+ int stride[MM_CAMERA_MAX_NUM_FRAMES];
+ uint32_t addr_offset[MM_CAMERA_MAX_NUM_FRAMES];
+ uint8_t local_flag[MM_CAMERA_MAX_NUM_FRAMES];
+ camera_memory_t *camera_memory[MM_CAMERA_MAX_NUM_FRAMES];
+ int main_ion_fd[MM_CAMERA_MAX_NUM_FRAMES];
+ struct ion_fd_data ion_info_fd[MM_CAMERA_MAX_NUM_FRAMES];
+} QCameraHalMemory_t;
+
+
+/* Camera-allocated buffer heap (ION/pmem backed); used for snapshot,
+ * thumbnail, recording, JPEG, raw and no-display preview memory.
+ * Initialized by initHeapMem() and torn down by releaseHeapMem(). */
+typedef struct {
+ int buffer_count;
+ uint32_t size;
+ uint32_t y_offset;
+ uint32_t cbcr_offset;
+ int fd[MM_CAMERA_MAX_NUM_FRAMES]; // backing fds; -1 when slot closed
+ int local_flag[MM_CAMERA_MAX_NUM_FRAMES];
+ camera_memory_t* camera_memory[MM_CAMERA_MAX_NUM_FRAMES];
+ camera_memory_t* metadata_memory[MM_CAMERA_MAX_NUM_FRAMES];
+ int main_ion_fd[MM_CAMERA_MAX_NUM_FRAMES];
+ struct ion_allocation_data alloc[MM_CAMERA_MAX_NUM_FRAMES];
+ struct ion_fd_data ion_info_fd[MM_CAMERA_MAX_NUM_FRAMES];
+} QCameraHalHeap_t;
+
+typedef struct {
+ camera_memory_t* camera_memory[3];
+ int main_ion_fd[3];
+ struct ion_allocation_data alloc[3];
+ struct ion_fd_data ion_info_fd[3];
+ int fd[3];
+ int size;
+} QCameraStatHeap_t;
+
+typedef struct {
+ int32_t msg_type;
+ int32_t ext1;
+ int32_t ext2;
+ void *cookie;
+} argm_notify_t;
+
+typedef struct {
+ int32_t msg_type;
+ camera_memory_t *data;
+ unsigned int index;
+ camera_frame_metadata_t *metadata;
+ void *cookie;
+} argm_data_cb_t;
+
+typedef struct {
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb;
+ argm_notify_t argm_notify;
+ argm_data_cb_t argm_data_cb;
+} app_notify_cb_t;
+
+/* camera_area_t
+ * rectangle with weight to store the focus and metering areas.
+ * x1, y1, x2, y2: from -1000 to 1000
+ * weight: 0 to 1000
+ */
+typedef struct {
+ int x1, y1, x2, y2;
+ int weight;
+} camera_area_t;
+
+//EXIF globals
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 }; // "ASCII\0\0\0"
+static const char ExifUndefinedPrefix[] = { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }; // "\0\0\0\0\0\0\0\0"
+
+//EXIF defines
+#define MAX_EXIF_TABLE_ENTRIES 20
+#define GPS_PROCESSING_METHOD_SIZE 101
+#define FOCAL_LENGTH_DECIMAL_PRECISION 100
+#define EXIF_ASCII_PREFIX_SIZE 8 //(sizeof(ExifAsciiPrefix))
+
+typedef struct{
+ //GPS tags
+ rat_t latitude[3];
+ rat_t longitude[3];
+ char lonRef[2];
+ char latRef[2];
+ rat_t altitude;
+ rat_t gpsTimeStamp[3];
+ char gpsDateStamp[20];
+ char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE+GPS_PROCESSING_METHOD_SIZE];
+ //Other tags
+ char dateTime[20];
+ rat_t focalLength;
+ uint16_t flashMode;
+ uint16_t isoSpeed;
+ rat_t exposure_time;
+
+ bool mAltitude;
+ bool mLongitude;
+ bool mLatitude;
+ bool mTimeStamp;
+ bool mGpsProcess;
+
+ int mAltitude_ref;
+ long mGPSTimestamp;
+
+} exif_values_t;
+
+namespace android {
+
+class QCameraStream;
+
+class QCameraHardwareInterface : public virtual RefBase {
+public:
+
+ QCameraHardwareInterface(int cameraId, int mode);
+
+ /** Set the ANativeWindow to which preview frames are sent */
+ int setPreviewWindow(preview_stream_ops_t* window);
+
+ /** Set the notification and data callbacks */
+ void setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ /**
+ * The following three functions all take a msg_type, which is a bitmask of
+ * the messages defined in include/ui/Camera.h
+ */
+
+ /**
+ * Enable a message, or set of messages.
+ */
+ void enableMsgType(int32_t msg_type);
+
+ /**
+ * Disable a message, or a set of messages.
+ *
+ * Once received a call to disableMsgType(CAMERA_MSG_VIDEO_FRAME), camera
+ * HAL should not rely on its client to call releaseRecordingFrame() to
+ * release video recording frames sent out by the cameral HAL before and
+ * after the disableMsgType(CAMERA_MSG_VIDEO_FRAME) call. Camera HAL
+ * clients must not modify/access any video recording frame after calling
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME).
+ */
+ void disableMsgType(int32_t msg_type);
+
+ /**
+ * Query whether a message, or a set of messages, is enabled. Note that
+ * this is operates as an AND, if any of the messages queried are off, this
+ * will return false.
+ */
+ int msgTypeEnabled(int32_t msg_type);
+
+ /**
+ * Start preview mode.
+ */
+ int startPreview();
+ int startPreview2();
+
+ /**
+ * Stop a previously started preview.
+ */
+ void stopPreview();
+
+ /**
+ * Returns true if preview is enabled.
+ */
+ int previewEnabled();
+
+
+ /**
+ * Request the camera HAL to store meta data or real YUV data in the video
+ * buffers sent out via CAMERA_MSG_VIDEO_FRAME for a recording session. If
+ * it is not called, the default camera HAL behavior is to store real YUV
+ * data in the video buffers.
+ *
+ * This method should be called before startRecording() in order to be
+ * effective.
+ *
+ * If meta data is stored in the video buffers, it is up to the receiver of
+ * the video buffers to interpret the contents and to find the actual frame
+ * data with the help of the meta data in the buffer. How this is done is
+ * outside of the scope of this method.
+ *
+ * Some camera HALs may not support storing meta data in the video buffers,
+ * but all camera HALs should support storing real YUV data in the video
+ * buffers. If the camera HAL does not support storing the meta data in the
+ * video buffers when it is requested to do do, INVALID_OPERATION must be
+ * returned. It is very useful for the camera HAL to pass meta data rather
+ * than the actual frame data directly to the video encoder, since the
+ * amount of the uncompressed frame data can be very large if video size is
+ * large.
+ *
+ * @param enable if true to instruct the camera HAL to store
+ * meta data in the video buffers; false to instruct
+ * the camera HAL to store real YUV data in the video
+ * buffers.
+ *
+ * @return OK on success.
+ */
+ int storeMetaDataInBuffers(int enable);
+
+ /**
+ * Start record mode. When a record image is available, a
+ * CAMERA_MSG_VIDEO_FRAME message is sent with the corresponding
+ * frame. Every record frame must be released by a camera HAL client via
+ * releaseRecordingFrame() before the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME). After the client calls
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+ * responsibility to manage the life-cycle of the video recording frames,
+ * and the client must not modify/access any video recording frames.
+ */
+ int startRecording();
+
+ /**
+ * Stop a previously started recording.
+ */
+ void stopRecording();
+
+ /**
+ * Returns true if recording is enabled.
+ */
+ int recordingEnabled();
+
+ /**
+ * Release a record frame previously returned by CAMERA_MSG_VIDEO_FRAME.
+ *
+ * It is camera HAL client's responsibility to release video recording
+ * frames sent out by the camera HAL before the camera HAL receives a call
+ * to disableMsgType(CAMERA_MSG_VIDEO_FRAME). After it receives the call to
+ * disableMsgType(CAMERA_MSG_VIDEO_FRAME), it is the camera HAL's
+ * responsibility to manage the life-cycle of the video recording frames.
+ */
+ void releaseRecordingFrame(const void *opaque);
+
+ /**
+ * Start auto focus, the notification callback routine is called with
+ * CAMERA_MSG_FOCUS once when focusing is complete. autoFocus() will be
+ * called again if another auto focus is needed.
+ */
+ int autoFocus();
+
+ /**
+ * Cancels auto-focus function. If the auto-focus is still in progress,
+ * this function will cancel it. Whether the auto-focus is in progress or
+ * not, this function will return the focus position to the default. If
+ * the camera does not support auto-focus, this is a no-op.
+ */
+ int cancelAutoFocus();
+
+ /**
+ * Take a picture.
+ */
+ int takePicture();
+
+ /**
+ * Cancel a picture that was started with takePicture. Calling this method
+ * when no picture is being taken is a no-op.
+ */
+ int cancelPicture();
+
+ /**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported.
+ */
+ int setParameters(const char *parms);
+
+ //status_t setParameters(const QCameraParameters& params);
+ /** Retrieve the camera parameters. The buffer returned by the camera HAL
+ must be returned back to it with put_parameters, if put_parameters
+ is not NULL.
+ */
+ int getParameters(char **parms);
+
+ /** The camera HAL uses its own memory to pass us the parameters when we
+ call get_parameters. Use this function to return the memory back to
+ the camera HAL, if put_parameters is not NULL. If put_parameters
+ is NULL, then you have to use free() to release the memory.
+ */
+ void putParameters(char *);
+
+ /**
+ * Send command to camera driver.
+ */
+ int sendCommand(int32_t cmd, int32_t arg1, int32_t arg2);
+
+ /**
+ * Release the hardware resources owned by this object. Note that this is
+ * *not* done in the destructor.
+ */
+ void release();
+
+ /**
+ * Dump state of the camera hardware
+ */
+ int dump(int fd);
+
+ //virtual sp<IMemoryHeap> getPreviewHeap() const;
+ //virtual sp<IMemoryHeap> getRawHeap() const;
+
+
+ status_t takeLiveSnapshot();
+ status_t takeFullSizeLiveshot();
+ bool canTakeFullSizeLiveshot();
+
+ //virtual status_t getBufferInfo( sp<IMemory>& Frame,
+ //size_t *alignedSize);
+ void getPictureSize(int *picture_width, int *picture_height) const;
+ void getPreviewSize(int *preview_width, int *preview_height) const;
+ cam_format_t getPreviewFormat() const;
+
+ cam_pad_format_t getPreviewPadding() const;
+
+ //bool useOverlay(void);
+ //virtual status_t setOverlay(const sp<Overlay> &overlay);
+ void processEvent(mm_camera_event_t *);
+ int getJpegQuality() const;
+ int getNumOfSnapshots(void) const;
+ int getNumOfSnapshots(const QCameraParameters& params);
+ int getThumbSizesFromAspectRatio(uint32_t aspect_ratio,
+ int *picture_width,
+ int *picture_height);
+ bool isRawSnapshot();
+ bool mShutterSoundPlayed;
+ void dumpFrameToFile(struct msm_frame*, HAL_cam_dump_frm_type_t);
+
+ static QCameraHardwareInterface *createInstance(int, int);
+ status_t setZSLBurstLookBack(const QCameraParameters& params);
+ status_t setZSLBurstInterval(const QCameraParameters& params);
+ int getZSLBurstInterval(void);
+ int getZSLQueueDepth(void) const;
+ int getZSLBackLookCount(void) const;
+
+ ~QCameraHardwareInterface();
+ int initHeapMem(QCameraHalHeap_t *heap, int num_of_buf, int pmem_type,
+ int frame_len, int cbcr_off, int y_off, mm_cameara_stream_buf_t *StreamBuf,
+ mm_camera_buf_def_t *buf_def, uint8_t num_planes, uint32_t *planes);
+
+ int releaseHeapMem( QCameraHalHeap_t *heap);
+ status_t sendMappingBuf(int ext_mode, int idx, int fd, uint32_t size,
+ int cameraid, mm_camera_socket_msg_type msg_type);
+ status_t sendUnMappingBuf(int ext_mode, int idx, int cameraid,
+ mm_camera_socket_msg_type msg_type);
+
+ int allocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt,
+ int ion_type);
+ int deallocate_ion_memory(QCameraHalHeap_t *p_camera_memory, int cnt);
+
+ int allocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt,
+ int ion_type);
+ int deallocate_ion_memory(QCameraStatHeap_t *p_camera_memory, int cnt);
+
+ int cache_ops(int ion_fd, struct ion_flush_data *cache_inv_data, int type);
+
+ void dumpFrameToFile(const void * data, uint32_t size, char* name,
+ char* ext, int index);
+ preview_format_info_t getPreviewFormatInfo( );
+ bool isCameraReady();
+ bool isNoDisplayMode();
+
+private:
+ int16_t zoomRatios[MAX_ZOOM_RATIOS];
+ struct camera_size_type default_preview_sizes[PREVIEW_TBL_MAX_SIZE];
+ struct camera_size_type default_video_sizes[VIDEO_TBL_MAX_SIZE];
+ struct camera_size_type default_hfr_sizes[HFR_TBL_MAX_SIZE];
+ struct camera_size_type default_thumbnail_sizes[THUMB_TBL_MAX_SIZE];
+ unsigned int preview_sizes_count;
+ unsigned int video_sizes_count;
+ unsigned int thumbnail_sizes_count;
+ unsigned int hfr_sizes_count;
+
+
+ bool mUseOverlay;
+
+ void loadTables();
+ void initDefaultParameters();
+ bool getMaxPictureDimension(mm_camera_dimension_t *dim);
+
+ status_t updateFocusDistances();
+
+ bool native_set_parms(mm_camera_parm_type_t type, uint16_t length, void *value);
+ bool native_set_parms( mm_camera_parm_type_t type, uint16_t length, void *value, int *result);
+
+ void hasAutoFocusSupport();
+ void debugShowPreviewFPS() const;
+ //void prepareSnapshotAndWait();
+
+ bool isPreviewRunning();
+ bool isRecordingRunning();
+ bool isSnapshotRunning();
+
+ void processChannelEvent(mm_camera_ch_event_t *, app_notify_cb_t *);
+ void processPreviewChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+ void processRecordChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+ void processSnapshotChannelEvent(mm_camera_ch_event_type_t channelEvent, app_notify_cb_t *);
+ void processCtrlEvent(mm_camera_ctrl_event_t *, app_notify_cb_t *);
+ void processStatsEvent(mm_camera_stats_event_t *, app_notify_cb_t *);
+ void processInfoEvent(mm_camera_info_event_t *event, app_notify_cb_t *);
+ void processprepareSnapshotEvent(cam_ctrl_status_t *);
+ void roiEvent(fd_roi_t roi, app_notify_cb_t *);
+ void zoomEvent(cam_ctrl_status_t *status, app_notify_cb_t *);
+ void autofocusevent(cam_ctrl_status_t *status, app_notify_cb_t *);
+ void handleZoomEventForPreview(app_notify_cb_t *);
+ void handleZoomEventForSnapshot(void);
+ status_t autoFocusEvent(cam_ctrl_status_t *, app_notify_cb_t *);
+ status_t autoFocusMoveEvent(cam_ctrl_status_t *, app_notify_cb_t *);
+
+ void filterPictureSizes();
+ bool supportsSceneDetection();
+ bool supportsSelectableZoneAf();
+ bool supportsFaceDetection();
+ bool supportsRedEyeReduction();
+ bool preview_parm_config (cam_ctrl_dimension_t* dim,QCameraParameters& parm);
+
+ void stopPreviewInternal();
+ void stopRecordingInternal();
+ //void stopPreviewZSL();
+ status_t cancelPictureInternal();
+ //status_t startPreviewZSL();
+ void pausePreviewForSnapshot();
+ void pausePreviewForZSL();
+ void pausePreviewForVideo();
+ void prepareVideoPicture(bool disable);
+ status_t resumePreviewAfterSnapshot();
+
+ status_t runFaceDetection();
+
+ status_t setParameters(const QCameraParameters& params);
+ QCameraParameters& getParameters() ;
+
+ bool getFlashCondition(void);
+
+ status_t setCameraMode(const QCameraParameters& params);
+ status_t setPictureSizeTable(void);
+ status_t setPreviewSizeTable(void);
+ status_t setVideoSizeTable(void);
+ status_t setPreviewSize(const QCameraParameters& params);
+ status_t setJpegThumbnailSize(const QCameraParameters& params);
+ status_t setPreviewFpsRange(const QCameraParameters& params);
+ status_t setPreviewFrameRate(const QCameraParameters& params);
+ status_t setPreviewFrameRateMode(const QCameraParameters& params);
+ status_t setVideoSize(const QCameraParameters& params);
+ status_t setPictureSize(const QCameraParameters& params);
+ status_t setJpegQuality(const QCameraParameters& params);
+ status_t setNumOfSnapshot(const QCameraParameters& params);
+ status_t setJpegRotation(int isZSL);
+ int getJpegRotation(void);
+ int getISOSpeedValue();
+ status_t setAntibanding(const QCameraParameters& params);
+ status_t setEffect(const QCameraParameters& params);
+ status_t setExposureCompensation(const QCameraParameters ¶ms);
+ status_t setAutoExposure(const QCameraParameters& params);
+ status_t setWhiteBalance(const QCameraParameters& params);
+ status_t setFlash(const QCameraParameters& params);
+ status_t setGpsLocation(const QCameraParameters& params);
+ status_t setRotation(const QCameraParameters& params);
+ status_t setZoom(const QCameraParameters& params);
+ status_t setFocusMode(const QCameraParameters& params);
+ status_t setBrightness(const QCameraParameters& params);
+ status_t setSkinToneEnhancement(const QCameraParameters& params);
+ status_t setOrientation(const QCameraParameters& params);
+ status_t setLensshadeValue(const QCameraParameters& params);
+ status_t setMCEValue(const QCameraParameters& params);
+ status_t setISOValue(const QCameraParameters& params);
+ status_t setPictureFormat(const QCameraParameters& params);
+ status_t setSharpness(const QCameraParameters& params);
+ status_t setContrast(const QCameraParameters& params);
+ status_t setSaturation(const QCameraParameters& params);
+ status_t setWaveletDenoise(const QCameraParameters& params);
+ status_t setSceneMode(const QCameraParameters& params);
+ status_t setContinuousAf(const QCameraParameters& params);
+ status_t setFaceDetection(const char *str);
+ status_t setSceneDetect(const QCameraParameters& params);
+ status_t setStrTextures(const QCameraParameters& params);
+ status_t setPreviewFormat(const QCameraParameters& params);
+ status_t setSelectableZoneAf(const QCameraParameters& params);
+ status_t setOverlayFormats(const QCameraParameters& params);
+ status_t setHighFrameRate(const QCameraParameters& params);
+ status_t setRedeyeReduction(const QCameraParameters& params);
+ status_t setAEBracket(const QCameraParameters& params);
+ status_t setFaceDetect(const QCameraParameters& params);
+ status_t setDenoise(const QCameraParameters& params);
+ status_t setAecAwbLock(const QCameraParameters & params);
+ status_t setHistogram(int histogram_en);
+ status_t setRecordingHint(const QCameraParameters& params);
+ status_t setRecordingHintValue(const int32_t value);
+ status_t setFocusAreas(const QCameraParameters& params);
+ status_t setMeteringAreas(const QCameraParameters& params);
+ status_t setFullLiveshot(void);
+ status_t setDISMode(void);
+ status_t setCaptureBurstExp(void);
+ status_t setPowerMode(const QCameraParameters& params);
+ void takePicturePrepareHardware( );
+ status_t setNoDisplayMode(const QCameraParameters& params);
+ status_t setCAFLockCancel(void);
+
+ isp3a_af_mode_t getAutoFocusMode(const QCameraParameters& params);
+ bool isValidDimension(int w, int h);
+
+ String8 create_values_str(const str_map *values, int len);
+
+ void setMyMode(int mode);
+ bool isZSLMode();
+ bool isWDenoiseEnabled();
+ void wdenoiseEvent(cam_ctrl_status_t status, void *cookie);
+ bool isLowPowerCamcorder();
+ void freePictureTable(void);
+ void freeVideoSizeTable(void);
+
+ int32_t createPreview();
+ int32_t createRecord();
+ int32_t createSnapshot();
+
+ int getHDRMode();
+ //EXIF
+ void addExifTag(exif_tag_id_t tagid, exif_tag_type_t type,
+ uint32_t count, uint8_t copy, void *data);
+ void setExifTags();
+ void initExifData();
+ void deinitExifData();
+ void setExifTagsGPS();
+ exif_tags_info_t* getExifData(){ return mExifData; }
+ int getExifTableNumEntries() { return mExifTableNumEntries; }
+ void parseGPSCoordinate(const char *latlonString, rat_t* coord);
+ bool getHdrInfoAndSetExp( int max_num_frm, int *num_frame, int *exp);
+ void hdrEvent(cam_ctrl_status_t status, void *cookie);
+
+ int mCameraId;
+ camera_mode_t myMode;
+ bool mPauseFramedispatch;
+
+ QCameraParameters mParameters;
+ //sp<Overlay> mOverlay;
+ int32_t mMsgEnabled;
+
+ camera_notify_callback mNotifyCb;
+ camera_data_callback mDataCb;
+ camera_data_timestamp_callback mDataCbTimestamp;
+ camera_request_memory mGetMemory;
+ void *mCallbackCookie;
+
+ sp<AshmemPool> mMetaDataHeap;
+
+ mutable Mutex mLock;
+ //mutable Mutex eventLock;
+ Mutex mCallbackLock;
+ Mutex mPreviewMemoryLock;
+ Mutex mRecordingMemoryLock;
+ Mutex mAutofocusLock;
+ Mutex mMetaDataWaitLock;
+ Mutex mRecordFrameLock;
+ Mutex mRecordLock;
+ Condition mRecordWait;
+ pthread_mutex_t mAsyncCmdMutex;
+ pthread_cond_t mAsyncCmdWait;
+
+ QCameraStream *mStreamDisplay;
+ QCameraStream *mStreamRecord;
+ QCameraStream *mStreamSnap;
+ QCameraStream *mStreamLiveSnap;
+
+ cam_ctrl_dimension_t mDimension;
+ int mPreviewWidth, mPreviewHeight;
+ int mVideoWidth, mVideoHeight;
+ int thumbnailWidth, thumbnailHeight;
+ int maxSnapshotWidth, maxSnapshotHeight;
+ int mPreviewFormat;
+ int mFps;
+ int mDebugFps;
+ int mBrightness;
+ int mContrast;
+ int mBestShotMode;
+ int mEffects;
+ int mSkinToneEnhancement;
+ int mDenoiseValue;
+ int mHJR;
+ int mRotation;
+ int mJpegQuality;
+ int mThumbnailQuality;
+ int mTargetSmoothZoom;
+ int mSmoothZoomStep;
+ int mMaxZoom;
+ int mCurrentZoom;
+ int mSupportedPictureSizesCount;
+ int mFaceDetectOn;
+ int mDumpFrmCnt;
+ int mDumpSkipCnt;
+ int mFocusMode;
+
+ unsigned int mPictureSizeCount;
+ unsigned int mPreviewSizeCount;
+ int mPowerMode;
+ unsigned int mVideoSizeCount;
+
+ bool mAutoFocusRunning;
+ bool mNeedToUnlockCaf;
+ bool mMultiTouch;
+ bool mHasAutoFocusSupport;
+ bool mInitialized;
+ bool mDisEnabled;
+ bool strTexturesOn;
+ bool mIs3DModeOn;
+ bool mSmoothZoomRunning;
+ bool mPreparingSnapshot;
+ bool mParamStringInitialized;
+ bool mZoomSupported;
+ bool mSendMetaData;
+ bool mFullLiveshotEnabled;
+ bool mRecordingHint;
+ bool mAppRecordingHint;
+ bool mStartRecording;
+ bool mReleasedRecordingFrame;
+ bool mStateLiveshot;
+ int mHdrMode;
+ int mSnapshotFormat;
+ int mZslInterval;
+ bool mRestartPreview;
+ bool isCameraOpen;
+
+ led_mode_t mLedStatusForZsl;
+ bool mFlashCond;
+
+/*for histogram*/
+ int mStatsOn;
+ int mCurrentHisto;
+ bool mSendData;
+ sp<AshmemPool> mStatHeap;
+ camera_memory_t *mStatsMapped[3];
+ QCameraStatHeap_t mHistServer;
+ int32_t mStatSize;
+
+ bool mZslLookBackMode;
+ int mZslLookBackValue;
+ int mHFRLevel;
+ bool mZslEmptyQueueFlag;
+ String8 mEffectValues;
+ String8 mIsoValues;
+ String8 mSceneModeValues;
+ String8 mSceneDetectValues;
+ String8 mFocusModeValues;
+ String8 mSelectableZoneAfValues;
+ String8 mAutoExposureValues;
+ String8 mWhitebalanceValues;
+ String8 mAntibandingValues;
+ String8 mFrameRateModeValues;
+ String8 mTouchAfAecValues;
+ String8 mPreviewSizeValues;
+ String8 mPictureSizeValues;
+ String8 mVideoSizeValues;
+ String8 mFlashValues;
+ String8 mLensShadeValues;
+ String8 mMceValues;
+ String8 mHistogramValues;
+ String8 mSkinToneEnhancementValues;
+ String8 mPictureFormatValues;
+ String8 mDenoiseValues;
+ String8 mZoomRatioValues;
+ String8 mPreviewFrameRateValues;
+ String8 mPreviewFormatValues;
+ String8 mFaceDetectionValues;
+ String8 mHfrValues;
+ String8 mHfrSizeValues;
+ String8 mRedeyeReductionValues;
+ String8 denoise_value;
+ String8 mFpsRangesSupportedValues;
+ String8 mZslValues;
+ String8 mFocusDistance;
+
+ friend class QCameraStream;
+ friend class QCameraStream_record;
+ friend class QCameraStream_preview;
+ friend class QCameraStream_Snapshot;
+
+ camera_size_type* mPictureSizes;
+ camera_size_type* mPreviewSizes;
+ camera_size_type* mVideoSizes;
+ const camera_size_type * mPictureSizesPtr;
+ HAL_camera_state_type_t mCameraState;
+ void *libdnr;
+ int (*LINK_morpho_DNR_ProcessFrame)(unsigned char* yuvImage, int width, int height, int y_level, int c_level);
+
+ /* Temporary - can be removed after Honeycomb*/
+#ifdef USE_ION
+ sp<IonPool> mPostPreviewHeap;
+#else
+ sp<PmemPool> mPostPreviewHeap;
+#endif
+ mm_cameara_stream_buf_t mPrevForPostviewBuf;
+ int mStoreMetaDataInFrame;
+ preview_stream_ops_t *mPreviewWindow;
+ Mutex mStateLock;
+ int mPreviewState;
+ /*preview memory with display case: memory is allocated and freed via
+ gralloc */
+ QCameraHalMemory_t mPreviewMemory;
+
+ /*preview memory without display case: memory is allocated
+ directly by camera */
+ QCameraHalHeap_t mNoDispPreviewMemory;
+
+ QCameraHalHeap_t mSnapshotMemory;
+ QCameraHalHeap_t mThumbnailMemory;
+ QCameraHalHeap_t mRecordingMemory;
+ QCameraHalHeap_t mJpegMemory;
+ QCameraHalHeap_t mRawMemory;
+ camera_frame_metadata_t mMetadata;
+ camera_face_t mFace[MAX_ROI];
+ preview_format_info_t mPreviewFormatInfo;
+ friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+
+ //EXIF
+ exif_tags_info_t mExifData[MAX_EXIF_TABLE_ENTRIES]; //Exif tags for JPEG encoder
+ exif_values_t mExifValues; //Exif values in usable format
+ int mExifTableNumEntries; //NUmber of entries in mExifData
+ int mNoDisplayMode;
+ int mIsoValue;
+
+};
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCameraHWI_Display.cpp b/camera/QCameraHWI_Display.cpp
new file mode 100644
index 0000000..27ebfef
--- /dev/null
+++ b/camera/QCameraHWI_Display.cpp
@@ -0,0 +1,76 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Display"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+#include "QCameraHWI_Display.h"
+
+
+namespace android {
+
+int QCameraDisplay_Overlay::Display_prepare_buffers()
+{
+ return 0;
+}
+
+
+int QCameraDisplay_Overlay::Display_set_crop()
+{
+ return 0;
+}
+
+
+int QCameraDisplay_Overlay::Display_set_geometry()
+{
+ return 0;
+}
+
+
+void QCameraDisplay_Overlay::Display_enqueue()
+{
+ return ;
+}
+
+
+
+void QCameraDisplay_Overlay::Display_dequeue()
+{
+ return ;
+}
+
+
+void QCameraDisplay_Overlay::Display_release_buffers()
+{
+ return ;
+}
+
+QCameraDisplay::~QCameraDisplay(){}
+
+QCameraDisplay_Overlay::~QCameraDisplay_Overlay()
+{
+ return ;
+}
+}; // namespace android
diff --git a/camera/QCameraHWI_Display.h b/camera/QCameraHWI_Display.h
new file mode 100644
index 0000000..cdf1dab
--- /dev/null
+++ b/camera/QCameraHWI_Display.h
@@ -0,0 +1,69 @@
+/*
+** Copyright (c) 2011, The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERAHWI_DISPLAY_H
+#define ANDROID_HARDWARE_QCAMERAHWI_DISPLAY_H
+
+
+#include <utils/threads.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+#include "QCamera_Intf.h"
+extern "C" {
+#include <mm_camera_interface2.h>
+}
+
+namespace android {
+
+/*===============================
+ Base Display Class
+================================*/
+
+class QCameraDisplay {
+
+public:
+ virtual int Display_prepare_buffers() = 0;
+ virtual int Display_set_crop( ) = 0;
+ virtual int Display_set_geometry( ) =0;
+ virtual void Display_enqueue( ) = 0;
+ virtual void Display_dequeue( ) = 0;
+ virtual void Display_release_buffers( ) =0;
+ virtual ~QCameraDisplay( );
+};
+
+/*================================
+ Overlay Derivative
+==================================*/
+class QCameraDisplay_Overlay: public QCameraDisplay {
+
+public:
+ int Display_prepare_buffers();
+ int Display_set_crop( );
+ int Display_set_geometry( );
+ void Display_enqueue( );
+ void Display_dequeue( );
+ void Display_release_buffers( );
+ virtual ~QCameraDisplay_Overlay( );
+
+
+};
+
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCameraHWI_Mem.cpp b/camera/QCameraHWI_Mem.cpp
new file mode 100644
index 0000000..e8e469b
--- /dev/null
+++ b/camera/QCameraHWI_Mem.cpp
@@ -0,0 +1,405 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Mem"
+#include <utils/Log.h>
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+//#include <binder/MemoryHeapPmem.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include "QCameraParameters.h"
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+
+#include "QCameraHWI_Mem.h"
+
+#define CAMERA_HAL_UNUSED(expr) do { (void)(expr); } while (0)
+
+/* QCameraHardwareInterface class implementation goes here*/
+/* following code implements the control logic of this class*/
+
+namespace android {
+
+
+static bool register_buf(int size,
+ int frame_size,
+ int cbcr_offset,
+ int yoffset,
+ int pmempreviewfd,
+ uint32_t offset,
+ uint8_t *buf,
+ int pmem_type,
+ bool vfe_can_write,
+ bool register_buffer = true);
+
+#if 0
+MMCameraDL::MMCameraDL(){
+ ALOGV("MMCameraDL: E");
+ libmmcamera = NULL;
+#if DLOPEN_LIBMMCAMERA
+ libmmcamera = ::dlopen("liboemcamera.so", RTLD_NOW);
+#endif
+ ALOGV("Open MM camera DL libeomcamera loaded at %p ", libmmcamera);
+ ALOGV("MMCameraDL: X");
+}
+
+void * MMCameraDL::pointer(){
+ return libmmcamera;
+}
+
+MMCameraDL::~MMCameraDL(){
+ ALOGV("~MMCameraDL: E");
+ LINK_mm_camera_destroy();
+ if (libmmcamera != NULL) {
+ ::dlclose(libmmcamera);
+ ALOGV("closed MM Camera DL ");
+ }
+ libmmcamera = NULL;
+ ALOGV("~MMCameraDL: X");
+}
+
+
+wp<MMCameraDL> MMCameraDL::instance;
+Mutex MMCameraDL::singletonLock;
+
+
+sp<MMCameraDL> MMCameraDL::getInstance(){
+ Mutex::Autolock instanceLock(singletonLock);
+ sp<MMCameraDL> mmCamera = instance.promote();
+ if(mmCamera == NULL){
+ mmCamera = new MMCameraDL();
+ instance = mmCamera;
+ }
+ return mmCamera;
+}
+#endif
+
+MemPool::MemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name) :
+ mBufferSize(buffer_size),
+ mNumBuffers(num_buffers),
+ mFrameSize(frame_size),
+ mBuffers(NULL), mName(name)
+{
+ int page_size_minus_1 = getpagesize() - 1;
+ mAlignedBufferSize = (buffer_size + page_size_minus_1) & (~page_size_minus_1);
+}
+
+void MemPool::completeInitialization()
+{
+ // If we do not know how big the frame will be, we wait to allocate
+ // the buffers describing the individual frames until we do know their
+ // size.
+
+ if (mFrameSize > 0) {
+ mBuffers = new sp<MemoryBase>[mNumBuffers];
+ for (int i = 0; i < mNumBuffers; i++) {
+ mBuffers[i] = new
+ MemoryBase(mHeap,
+ i * mAlignedBufferSize,
+ mFrameSize);
+ }
+ }
+}
+
+AshmemPool::AshmemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name) :
+ MemPool(buffer_size,
+ num_buffers,
+ frame_size,
+ name)
+{
+ ALOGV("constructing MemPool %s backed by ashmem: "
+ "%d frames @ %d uint8_ts, "
+ "buffer size %d",
+ mName,
+ num_buffers, frame_size, buffer_size);
+
+ int page_mask = getpagesize() - 1;
+ int ashmem_size = buffer_size * num_buffers;
+ ashmem_size += page_mask;
+ ashmem_size &= ~page_mask;
+
+ mHeap = new MemoryHeapBase(ashmem_size);
+
+ completeInitialization();
+}
+
+static bool register_buf(int size,
+ int frame_size,
+ int cbcr_offset,
+ int yoffset,
+ int pmempreviewfd,
+ uint32_t offset,
+ uint8_t *buf,
+ int pmem_type,
+ bool vfe_can_write,
+ bool register_buffer)
+{
+ /*TODO*/
+ /*
+ struct msm_pmem_info pmemBuf;
+ CAMERA_HAL_UNUSED(frame_size);
+
+ pmemBuf.type = pmem_type;
+ pmemBuf.fd = pmempreviewfd;
+ pmemBuf.offset = offset;
+ pmemBuf.len = size;
+ pmemBuf.vaddr = buf;
+ pmemBuf.y_off = yoffset;
+ pmemBuf.cbcr_off = cbcr_offset;
+
+ pmemBuf.active = vfe_can_write;
+
+ ALOGV("register_buf: reg = %d buffer = %p",
+ !register_buffer, buf);
+ if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+ CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf) < 0) {
+ ALOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM error %s",
+ strerror(errno));
+ return false;
+ }*/
+
+ return true;
+
+}
+
+#if 0
+bool register_record_buffers(bool register_buffer) {
+ ALOGI("%s: (%d) E", __FUNCTION__, register_buffer);
+ struct msm_pmem_info pmemBuf;
+
+ for (int cnt = 0; cnt < VIDEO_BUFFER_COUNT; ++cnt) {
+ pmemBuf.type = MSM_PMEM_VIDEO;
+ pmemBuf.fd = mRecordHeap->mHeap->getHeapID();
+ pmemBuf.offset = mRecordHeap->mAlignedBufferSize * cnt;
+ pmemBuf.len = mRecordHeap->mBufferSize;
+ pmemBuf.vaddr = (uint8_t *)mRecordHeap->mHeap->base() + mRecordHeap->mAlignedBufferSize * cnt;
+ pmemBuf.y_off = 0;
+ pmemBuf.cbcr_off = recordframes[0].cbcr_off;
+ if(register_buffer == true) {
+ pmemBuf.active = (cnt<ACTIVE_VIDEO_BUFFERS);
+ if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+ pmemBuf.type = MSM_PMEM_VIDEO_VPE;
+ pmemBuf.active = 1;
+ }
+ } else {
+ pmemBuf.active = false;
+ }
+
+ ALOGV("register_buf: reg = %d buffer = %p", !register_buffer,
+ (void *)pmemBuf.vaddr);
+ if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+ CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf) < 0) {
+ ALOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM error %s",
+ strerror(errno));
+ return false;
+ }
+ }
+ return true;
+}
+#endif
+#if 0
+PmemPool::PmemPool(const char *pmem_pool,
+ int flags,
+ int pmem_type,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yOffset, const char *name) :
+ MemPool(buffer_size,num_buffers,frame_size,name),
+ mPmemType(pmem_type),
+ mCbCrOffset(cbcr_offset),
+ myOffset(yOffset)
+{
+ ALOGI("constructing MemPool %s backed by pmem pool %s: "
+ "%d frames @ %d bytes, buffer size %d",
+ mName,
+ pmem_pool, num_buffers, frame_size,
+ buffer_size);
+
+ //mMMCameraDLRef = MMCameraDL::getInstance();
+
+
+ // Make a new mmap'ed heap that can be shared across processes.
+    // mAlignedBufferSize is already 4k aligned. (do we need the total size to be a power of 2??)
+ mAlignedSize = mAlignedBufferSize * num_buffers;
+
+ sp<MemoryHeapBase> masterHeap =
+ new MemoryHeapBase(pmem_pool, mAlignedSize, flags);
+
+ if (masterHeap->getHeapID() < 0) {
+ ALOGE("failed to construct master heap for pmem pool %s", pmem_pool);
+ masterHeap.clear();
+ return;
+ }
+
+ sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(masterHeap, flags);
+ if (pmemHeap->getHeapID() >= 0) {
+ pmemHeap->slap();
+ masterHeap.clear();
+ mHeap = pmemHeap;
+ pmemHeap.clear();
+
+ mFd = mHeap->getHeapID();
+ if (::ioctl(mFd, PMEM_GET_SIZE, &mSize)) {
+ ALOGE("pmem pool %s ioctl(PMEM_GET_SIZE) error %s (%d)",
+ pmem_pool,
+ ::strerror(errno), errno);
+ mHeap.clear();
+ return;
+ }
+
+ ALOGE("pmem pool %s ioctl(fd = %d, PMEM_GET_SIZE) is %ld",
+ pmem_pool,
+ mFd,
+ mSize.len);
+ ALOGE("mBufferSize=%d, mAlignedBufferSize=%d\n", mBufferSize, mAlignedBufferSize);
+
+#if 0
+ // Unregister preview buffers with the camera drivers. Allow the VFE to write
+ // to all preview buffers except for the last one.
+ // Only Register the preview, snapshot and thumbnail buffers with the kernel.
+ if( (strcmp("postview", mName) != 0) ){
+ int num_buf = num_buffers;
+ if(!strcmp("preview", mName)) num_buf = kPreviewBufferCount;
+ ALOGD("num_buffers = %d", num_buf);
+ for (int cnt = 0; cnt < num_buf; ++cnt) {
+ int active = 1;
+ if(pmem_type == MSM_PMEM_VIDEO){
+ active = (cnt<ACTIVE_VIDEO_BUFFERS);
+ //When VPE is enabled, set the last record
+ //buffer as active and pmem type as PMEM_VIDEO_VPE
+ //as this is a requirement from VPE operation.
+ //No need to set this pmem type to VIDEO_VPE while unregistering,
+ //because as per camera stack design: "the VPE AXI is also configured
+ //when VFE is configured for VIDEO, which is as part of preview
+ //initialization/start. So during this VPE AXI config camera stack
+ //will lookup the PMEM_VIDEO_VPE buffer and give it as o/p of VPE and
+ //change it's type to PMEM_VIDEO".
+ if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+ active = 1;
+ pmem_type = MSM_PMEM_VIDEO_VPE;
+ }
+ ALOGV(" pmempool creating video buffers : active %d ", active);
+ }
+ else if (pmem_type == MSM_PMEM_PREVIEW){
+ active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+ }
+ else if ((pmem_type == MSM_PMEM_MAINIMG)
+ || (pmem_type == MSM_PMEM_THUMBNAIL)){
+ active = (cnt < ACTIVE_ZSL_BUFFERS);
+ }
+ register_buf(mBufferSize,
+ mFrameSize, mCbCrOffset, myOffset,
+ mHeap->getHeapID(),
+ mAlignedBufferSize * cnt,
+ (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+ pmem_type,
+ active);
+ }
+ }
+#endif
+ completeInitialization();
+ }
+ else ALOGE("pmem pool %s error: could not create master heap!",
+ pmem_pool);
+ ALOGI("%s: (%s) X ", __FUNCTION__, mName);
+}
+#endif
+
+PmemPool::~PmemPool()
+{
+ ALOGV("%s: %s E", __FUNCTION__, mName);
+#if 0
+ if (mHeap != NULL) {
+ // Unregister preview buffers with the camera drivers.
+ // Only Unregister the preview, snapshot and thumbnail
+ // buffers with the kernel.
+ if( (strcmp("postview", mName) != 0) ){
+ int num_buffers = mNumBuffers;
+ if(!strcmp("preview", mName)) num_buffers = PREVIEW_BUFFER_COUNT;
+ for (int cnt = 0; cnt < num_buffers; ++cnt) {
+ register_buf(mBufferSize,
+ mFrameSize,
+ mCbCrOffset,
+ myOffset,
+ mHeap->getHeapID(),
+ mAlignedBufferSize * cnt,
+ (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+ mPmemType,
+ false,
+ false /* unregister */);
+ }
+ }
+ }
+ mMMCameraDLRef.clear();
+#endif
+ ALOGV("%s: %s X", __FUNCTION__, mName);
+}
+MemPool::~MemPool()
+{
+ ALOGV("destroying MemPool %s", mName);
+ if (mFrameSize > 0)
+ delete [] mBuffers;
+ mHeap.clear();
+ ALOGV("destroying MemPool %s completed", mName);
+}
+
+
+status_t MemPool::dump(int fd, const Vector<String16>& args) const
+{
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ String8 result;
+ CAMERA_HAL_UNUSED(args);
+ snprintf(buffer, 255, "QualcommCameraHardware::AshmemPool::dump\n");
+ result.append(buffer);
+ if (mName) {
+ snprintf(buffer, 255, "mem pool name (%s)\n", mName);
+ result.append(buffer);
+ }
+ if (mHeap != 0) {
+ snprintf(buffer, 255, "heap base(%p), size(%d), flags(%d), device(%s)\n",
+ mHeap->getBase(), mHeap->getSize(),
+ mHeap->getFlags(), mHeap->getDevice());
+ result.append(buffer);
+ }
+ snprintf(buffer, 255,
+ "buffer size (%d), number of buffers (%d), frame size(%d)",
+ mBufferSize, mNumBuffers, mFrameSize);
+ result.append(buffer);
+ write(fd, result.string(), result.size());
+ return NO_ERROR;
+}
+
+};
diff --git a/camera/QCameraHWI_Mem.h b/camera/QCameraHWI_Mem.h
new file mode 100644
index 0000000..b2f4691
--- /dev/null
+++ b/camera/QCameraHWI_Mem.h
@@ -0,0 +1,109 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+
+#ifndef __QCAMERAHWI_MEM_H
+#define __QCAMERAHWI_MEM_H
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+#include <stdint.h>
+#include "QCamera_Intf.h"
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/msm_ion.h>
+}
+
+
+#define VIDEO_BUFFER_COUNT 5
+#define VIDEO_BUFFER_COUNT_LOW_POWER_CAMCORDER 9
+
+#define PREVIEW_BUFFER_COUNT 5
+
+namespace android {
+
+// This class represents a heap which maintains several contiguous
+// buffers. The heap may be backed by pmem (when pmem_pool contains
+// the name of a /dev/pmem* file), or by ashmem (when pmem_pool == NULL).
+
+struct MemPool : public RefBase {
+ MemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name);
+
+ virtual ~MemPool() = 0;
+
+ void completeInitialization();
+ bool initialized() const {
+ return mHeap != NULL && mHeap->base() != MAP_FAILED;
+ }
+
+ virtual status_t dump(int fd, const Vector<String16>& args) const;
+
+ int mBufferSize;
+ int mAlignedBufferSize;
+ int mNumBuffers;
+ int mFrameSize;
+ sp<MemoryHeapBase> mHeap;
+ sp<MemoryBase> *mBuffers;
+
+ const char *mName;
+};
+
+class AshmemPool : public MemPool {
+public:
+ AshmemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name);
+};
+
+class PmemPool : public MemPool {
+public:
+ PmemPool(const char *pmem_pool,
+ int flags, int pmem_type,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yoffset, const char *name);
+ virtual ~PmemPool();
+ int mFd;
+ int mPmemType;
+ int mCbCrOffset;
+ int myOffset;
+ int mCameraControlFd;
+ uint32_t mAlignedSize;
+ struct pmem_region mSize;
+};
+
+class IonPool : public MemPool {
+public:
+ IonPool( int flags,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yoffset, const char *name);
+ virtual ~IonPool();
+ int mFd;
+ int mCbCrOffset;
+ int myOffset;
+ int mCameraControlFd;
+ uint32_t mAlignedSize;
+private:
+ static const char mIonDevName[];
+};
+
+};
+#endif
diff --git a/camera/QCameraHWI_Parm.cpp b/camera/QCameraHWI_Parm.cpp
new file mode 100644
index 0000000..8012e60
--- /dev/null
+++ b/camera/QCameraHWI_Parm.cpp
@@ -0,0 +1,4315 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Parm"
+#include <utils/Log.h>
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+//#include <binder/MemoryHeapPmem.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include "QCameraParameters.h"
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+
+#include "linux/msm_mdp.h"
+#include <linux/fb.h>
+#include <limits.h>
+
+extern "C" {
+#include <fcntl.h>
+#include <time.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <termios.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <signal.h>
+#include <errno.h>
+#include <sys/mman.h>
+#include <sys/system_properties.h>
+#include <sys/time.h>
+#include <stdlib.h>
+#include <linux/msm_ion.h>
+
+} // extern "C"
+
+#include "QCameraHWI.h"
+#include "QCameraStream.h"
+
+/* QCameraHardwareInterface class implementation goes here*/
+/* following code implements the parameter logic of this class*/
+#define EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR 12
+#define EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR -12
+#define EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR 0
+#define EXPOSURE_COMPENSATION_DENOMINATOR 6
+#define EXPOSURE_COMPENSATION_STEP ((float (1))/EXPOSURE_COMPENSATION_DENOMINATOR)
+#define DEFAULT_CAMERA_AREA "(0, 0, 0, 0, 0)"
+
+#define HDR_HAL_FRAME 2
+
+#define BURST_INTREVAL_MIN 1
+#define BURST_INTREVAL_MAX 10
+#define BURST_INTREVAL_DEFAULT 1
+
+//Default FPS
+#define MINIMUM_FPS 5
+#define MAXIMUM_FPS 120
+#define DEFAULT_FIXED_FPS 30
+#define DEFAULT_FPS 30
+
+//Default Picture Width
+#define DEFAULT_PICTURE_WIDTH 640
+#define DEFAULT_PICTURE_HEIGHT 480
+
+//Default Video Width
+#define DEFAULT_VIDEO_WIDTH 1920
+#define DEFAULT_VIDEO_HEIGHT 1088
+
+#define THUMBNAIL_SIZE_COUNT (sizeof(thumbnail_sizes)/sizeof(thumbnail_size_type))
+#define DEFAULT_THUMBNAIL_SETTING 4
+#define THUMBNAIL_WIDTH_STR "512"
+#define THUMBNAIL_HEIGHT_STR "384"
+#define THUMBNAIL_SMALL_HEIGHT 144
+
+#define DONT_CARE_COORDINATE -1
+
+//for histogram stats
+#define HISTOGRAM_STATS_SIZE 257
+
+//Supported preview fps ranges should be added to this array in the form (minFps,maxFps)
+static android::FPSRange FpsRangesSupported[] = {
+ android::FPSRange(MINIMUM_FPS*1000,MAXIMUM_FPS*1000)
+ };
+#define FPS_RANGES_SUPPORTED_COUNT (sizeof(FpsRangesSupported)/sizeof(FpsRangesSupported[0]))
+
+
+typedef struct {
+ uint32_t aspect_ratio;
+ uint32_t width;
+ uint32_t height;
+} thumbnail_size_type;
+
+static thumbnail_size_type thumbnail_sizes[] = {
+{ 7281, 512, 288 }, //1.777778
+{ 6826, 480, 288 }, //1.666667
+{ 6808, 256, 154 }, //1.66233
+{ 6144, 432, 288 }, //1.5
+{ 5461, 512, 384 }, //1.333333
+{ 5006, 352, 288 }, //1.222222
+{ 5461, 320, 240 }, //1.33333
+{ 5006, 176, 144 }, //1.222222
+
+};
+
+static struct camera_size_type zsl_picture_sizes[] = {
+ { 1280, 960}, // 1.3MP
+ { 800, 600}, //SVGA
+ { 800, 480}, // WVGA
+ { 640, 480}, // VGA
+ { 352, 288}, //CIF
+ { 320, 240}, // QVGA
+ { 176, 144} // QCIF
+};
+
+static camera_size_type default_picture_sizes[] = {
+ { 4000, 3000}, // 12MP
+ { 3264, 2448}, // 8MP
+ { 3264, 1836}, // Picture Size to match 1080p,720p AR
+ { 3264, 2176}, // Picture Size to match 480p AR
+ { 2592, 1944}, // 5MP
+ { 2048, 1536}, // 3MP QXGA
+ { 1920, 1080}, // HD1080
+ { 1600, 1200}, // 2MP UXGA
+ { 1280, 960}, // 1.3MP
+ { 1280, 720},
+ { 720, 480},
+ { 800, 480}, // WVGA
+ { 640, 480}, // VGA
+ { 352, 288}, // CIF
+ { 320, 240}, // QVGA
+ { 176, 144} // QCIF
+};
+
+static int iso_speed_values[] = {
+ 0, 1, 100, 200, 400, 800, 1600
+};
+
+extern int HAL_numOfCameras;
+extern qcamera_info_t HAL_cameraInfo[MSM_MAX_CAMERA_SENSORS];
+extern mm_camera_t * HAL_camerahandle[MSM_MAX_CAMERA_SENSORS];
+
+namespace android {
+
+static uint32_t HFR_SIZE_COUNT=2;
+static const int PICTURE_FORMAT_JPEG = 1;
+static const int PICTURE_FORMAT_RAW = 2;
+
+/********************************************************************/
+static const str_map effects[] = {
+ { QCameraParameters::EFFECT_NONE, CAMERA_EFFECT_OFF },
+ { QCameraParameters::EFFECT_MONO, CAMERA_EFFECT_MONO },
+ { QCameraParameters::EFFECT_NEGATIVE, CAMERA_EFFECT_NEGATIVE },
+ { QCameraParameters::EFFECT_SOLARIZE, CAMERA_EFFECT_SOLARIZE },
+ { QCameraParameters::EFFECT_SEPIA, CAMERA_EFFECT_SEPIA },
+ { QCameraParameters::EFFECT_POSTERIZE, CAMERA_EFFECT_POSTERIZE },
+ { QCameraParameters::EFFECT_WHITEBOARD, CAMERA_EFFECT_WHITEBOARD },
+ { QCameraParameters::EFFECT_BLACKBOARD, CAMERA_EFFECT_BLACKBOARD },
+ { QCameraParameters::EFFECT_AQUA, CAMERA_EFFECT_AQUA },
+ { QCameraParameters::EFFECT_EMBOSS, CAMERA_EFFECT_EMBOSS },
+ { QCameraParameters::EFFECT_SKETCH, CAMERA_EFFECT_SKETCH },
+ { QCameraParameters::EFFECT_NEON, CAMERA_EFFECT_NEON }
+};
+
+static const str_map iso[] = {
+ { QCameraParameters::ISO_AUTO, CAMERA_ISO_AUTO},
+ { QCameraParameters::ISO_HJR, CAMERA_ISO_DEBLUR},
+ { QCameraParameters::ISO_100, CAMERA_ISO_100},
+ { QCameraParameters::ISO_200, CAMERA_ISO_200},
+ { QCameraParameters::ISO_400, CAMERA_ISO_400},
+ { QCameraParameters::ISO_800, CAMERA_ISO_800 },
+ { QCameraParameters::ISO_1600, CAMERA_ISO_1600 }
+};
+
+static const str_map scenemode[] = {
+ { QCameraParameters::SCENE_MODE_AUTO, CAMERA_BESTSHOT_OFF },
+ { QCameraParameters::SCENE_MODE_ASD, CAMERA_BESTSHOT_AUTO },
+ { QCameraParameters::SCENE_MODE_ACTION, CAMERA_BESTSHOT_ACTION },
+ { QCameraParameters::SCENE_MODE_PORTRAIT, CAMERA_BESTSHOT_PORTRAIT },
+ { QCameraParameters::SCENE_MODE_LANDSCAPE, CAMERA_BESTSHOT_LANDSCAPE },
+ { QCameraParameters::SCENE_MODE_NIGHT, CAMERA_BESTSHOT_NIGHT },
+ { QCameraParameters::SCENE_MODE_NIGHT_PORTRAIT, CAMERA_BESTSHOT_NIGHT_PORTRAIT },
+ { QCameraParameters::SCENE_MODE_THEATRE, CAMERA_BESTSHOT_THEATRE },
+ { QCameraParameters::SCENE_MODE_BEACH, CAMERA_BESTSHOT_BEACH },
+ { QCameraParameters::SCENE_MODE_SNOW, CAMERA_BESTSHOT_SNOW },
+ { QCameraParameters::SCENE_MODE_SUNSET, CAMERA_BESTSHOT_SUNSET },
+ { QCameraParameters::SCENE_MODE_STEADYPHOTO, CAMERA_BESTSHOT_ANTISHAKE },
+ { QCameraParameters::SCENE_MODE_FIREWORKS , CAMERA_BESTSHOT_FIREWORKS },
+ { QCameraParameters::SCENE_MODE_SPORTS , CAMERA_BESTSHOT_SPORTS },
+ { QCameraParameters::SCENE_MODE_PARTY, CAMERA_BESTSHOT_PARTY },
+ { QCameraParameters::SCENE_MODE_CANDLELIGHT, CAMERA_BESTSHOT_CANDLELIGHT },
+ { QCameraParameters::SCENE_MODE_BACKLIGHT, CAMERA_BESTSHOT_BACKLIGHT },
+ { QCameraParameters::SCENE_MODE_FLOWERS, CAMERA_BESTSHOT_FLOWERS },
+ { QCameraParameters::SCENE_MODE_AR, CAMERA_BESTSHOT_AR },
+ { QCameraParameters::SCENE_MODE_HDR, CAMERA_BESTSHOT_AUTO },
+};
+
+static const str_map scenedetect[] = {
+ { QCameraParameters::SCENE_DETECT_OFF, false },
+ { QCameraParameters::SCENE_DETECT_ON, true },
+};
+
+#define DONT_CARE AF_MODE_MAX
+static const str_map focus_modes[] = {
+ { QCameraParameters::FOCUS_MODE_AUTO, AF_MODE_AUTO},
+ { QCameraParameters::FOCUS_MODE_INFINITY, AF_MODE_INFINITY },
+ { QCameraParameters::FOCUS_MODE_NORMAL, AF_MODE_NORMAL },
+ { QCameraParameters::FOCUS_MODE_MACRO, AF_MODE_MACRO },
+ { QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, AF_MODE_CAF},
+ { QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, AF_MODE_CAF }
+};
+
+static const str_map selectable_zone_af[] = {
+ { QCameraParameters::SELECTABLE_ZONE_AF_AUTO, AUTO },
+ { QCameraParameters::SELECTABLE_ZONE_AF_SPOT_METERING, SPOT },
+ { QCameraParameters::SELECTABLE_ZONE_AF_CENTER_WEIGHTED, CENTER_WEIGHTED },
+ { QCameraParameters::SELECTABLE_ZONE_AF_FRAME_AVERAGE, AVERAGE }
+};
+
+static const str_map autoexposure[] = {
+ { QCameraParameters::AUTO_EXPOSURE_FRAME_AVG, CAMERA_AEC_FRAME_AVERAGE },
+ { QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED, CAMERA_AEC_CENTER_WEIGHTED },
+ { QCameraParameters::AUTO_EXPOSURE_SPOT_METERING, CAMERA_AEC_SPOT_METERING }
+};
+
+// from aeecamera.h
+static const str_map whitebalance[] = {
+ { QCameraParameters::WHITE_BALANCE_AUTO, CAMERA_WB_AUTO },
+ { QCameraParameters::WHITE_BALANCE_INCANDESCENT, CAMERA_WB_INCANDESCENT },
+ { QCameraParameters::WHITE_BALANCE_FLUORESCENT, CAMERA_WB_FLUORESCENT },
+ { QCameraParameters::WHITE_BALANCE_DAYLIGHT, CAMERA_WB_DAYLIGHT },
+ { QCameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, CAMERA_WB_CLOUDY_DAYLIGHT }
+};
+
+static const str_map antibanding[] = {
+ { QCameraParameters::ANTIBANDING_OFF, CAMERA_ANTIBANDING_OFF },
+ { QCameraParameters::ANTIBANDING_50HZ, CAMERA_ANTIBANDING_50HZ },
+ { QCameraParameters::ANTIBANDING_60HZ, CAMERA_ANTIBANDING_60HZ },
+ { QCameraParameters::ANTIBANDING_AUTO, CAMERA_ANTIBANDING_AUTO }
+};
+
+static const str_map frame_rate_modes[] = {
+ {QCameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE, FPS_MODE_AUTO},
+ {QCameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE, FPS_MODE_FIXED}
+};
+
+static const str_map touchafaec[] = {
+ { QCameraParameters::TOUCH_AF_AEC_OFF, false },
+ { QCameraParameters::TOUCH_AF_AEC_ON, true }
+};
+
+static const str_map hfr[] = {
+ { QCameraParameters::VIDEO_HFR_OFF, CAMERA_HFR_MODE_OFF },
+ { QCameraParameters::VIDEO_HFR_2X, CAMERA_HFR_MODE_60FPS },
+ { QCameraParameters::VIDEO_HFR_3X, CAMERA_HFR_MODE_90FPS },
+ { QCameraParameters::VIDEO_HFR_4X, CAMERA_HFR_MODE_120FPS },
+};
+static const int HFR_VALUES_COUNT = (sizeof(hfr)/sizeof(str_map));
+
+static const str_map flash[] = {
+ { QCameraParameters::FLASH_MODE_OFF, LED_MODE_OFF },
+ { QCameraParameters::FLASH_MODE_AUTO, LED_MODE_AUTO },
+ { QCameraParameters::FLASH_MODE_ON, LED_MODE_ON },
+ { QCameraParameters::FLASH_MODE_TORCH, LED_MODE_TORCH}
+};
+
+static const str_map lensshade[] = {
+ { QCameraParameters::LENSSHADE_ENABLE, true },
+ { QCameraParameters::LENSSHADE_DISABLE, false }
+};
+
+static const str_map mce[] = {
+ { QCameraParameters::MCE_ENABLE, true },
+ { QCameraParameters::MCE_DISABLE, false }
+};
+
+static const str_map histogram[] = {
+ { QCameraParameters::HISTOGRAM_ENABLE, true },
+ { QCameraParameters::HISTOGRAM_DISABLE, false }
+};
+
+static const str_map skinToneEnhancement[] = {
+ { QCameraParameters::SKIN_TONE_ENHANCEMENT_ENABLE, true },
+ { QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE, false }
+};
+
+static const str_map denoise[] = {
+ { QCameraParameters::DENOISE_OFF, false },
+ { QCameraParameters::DENOISE_ON, true }
+};
+
+static const str_map facedetection[] = {
+ { QCameraParameters::FACE_DETECTION_OFF, false },
+ { QCameraParameters::FACE_DETECTION_ON, true }
+};
+
+static const str_map redeye_reduction[] = {
+ { QCameraParameters::REDEYE_REDUCTION_ENABLE, true },
+ { QCameraParameters::REDEYE_REDUCTION_DISABLE, false }
+};
+
+static const str_map picture_formats[] = {
+ {QCameraParameters::PIXEL_FORMAT_JPEG, PICTURE_FORMAT_JPEG},
+ {QCameraParameters::PIXEL_FORMAT_RAW, PICTURE_FORMAT_RAW}
+};
+
+static const str_map recording_Hints[] = {
+ {"false", false},
+ {"true", true}
+};
+
+static const str_map preview_formats[] = {
+ {QCameraParameters::PIXEL_FORMAT_YUV420SP, HAL_PIXEL_FORMAT_YCrCb_420_SP},
+ {QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO, HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO},
+ {QCameraParameters::PIXEL_FORMAT_YV12, HAL_PIXEL_FORMAT_YV12},
+ {QCameraParameters::PIXEL_FORMAT_YUV420P,HAL_PIXEL_FORMAT_YV12},
+ {QCameraParameters::PIXEL_FORMAT_NV12, HAL_PIXEL_FORMAT_YCbCr_420_SP}
+};
+
+static const preview_format_info_t preview_format_info_list[] = {
+ {HAL_PIXEL_FORMAT_YCrCb_420_SP, CAMERA_YUV_420_NV21, CAMERA_PAD_TO_WORD, 2},
+ {HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO, CAMERA_YUV_420_NV21, CAMERA_PAD_TO_4K, 2},
+ {HAL_PIXEL_FORMAT_YCbCr_420_SP, CAMERA_YUV_420_NV12, CAMERA_PAD_TO_WORD, 2},
+ {HAL_PIXEL_FORMAT_YV12, CAMERA_YUV_420_YV12, CAMERA_PAD_TO_WORD, 3}
+};
+
+static const str_map zsl_modes[] = {
+ { QCameraParameters::ZSL_OFF, false },
+ { QCameraParameters::ZSL_ON, true },
+};
+
+
+static const str_map hdr_bracket[] = {
+ { QCameraParameters::AE_BRACKET_HDR_OFF,HDR_BRACKETING_OFF},
+ { QCameraParameters::AE_BRACKET_HDR,HDR_MODE },
+};
+
+typedef enum {
+ NORMAL_POWER,
+ LOW_POWER
+} power_mode;
+
+static const str_map power_modes[] = {
+ { QCameraParameters::NORMAL_POWER,NORMAL_POWER },
+ { QCameraParameters::LOW_POWER,LOW_POWER }
+};
+
+/**************************************************************************/
+static int attr_lookup(const str_map arr[], int len, const char *name)
+{
+ if (name) {
+ for (int i = 0; i < len; i++) {
+ if (!strcmp(arr[i].desc, name))
+ return arr[i].val;
+ }
+ }
+ return NOT_FOUND;
+}
+
+bool QCameraHardwareInterface::native_set_parms(
+ mm_camera_parm_type_t type, uint16_t length, void *value)
+{
+ ALOGV("%s : type : %d Value : %d",__func__,type,*((int *)value));
+ if(MM_CAMERA_OK != cam_config_set_parm(mCameraId, type,value )) {
+ ALOGE("native_set_parms failed: type %d length %d error %s",
+ type, length, strerror(errno));
+ return false;
+ }
+
+ return true;
+
+}
+
+bool QCameraHardwareInterface::native_set_parms(
+ mm_camera_parm_type_t type, uint16_t length, void *value, int *result)
+{
+ *result= cam_config_set_parm(mCameraId, type,value );
+ if(MM_CAMERA_OK == *result) {
+ ALOGV("native_set_parms: succeeded : %d", *result);
+ return true;
+ }
+
+ ALOGE("native_set_parms failed: type %d length %d error str %s error# %d",
+ type, length, strerror(errno), errno);
+ return false;
+}
+
+//Filter Picture sizes based on max width and height
+/* TBD: do we still need this - except for ZSL? */
+// Scans mPictureSizes to record the largest supported snapshot dimensions
+// (maxSnapshotWidth/Height), then selects the picture-size table exposed to
+// the app: a reduced fixed table in ZSL mode, the full table otherwise.
+void QCameraHardwareInterface::filterPictureSizes(){
+    unsigned int i;
+    if(mPictureSizeCount <= 0)
+        return;
+    maxSnapshotWidth = mPictureSizes[0].width;
+    maxSnapshotHeight = mPictureSizes[0].height;
+    // Iterate through all the width and height to find the max value
+    for(i =0; i<mPictureSizeCount;i++){
+        if(((maxSnapshotWidth < mPictureSizes[i].width) &&
+            (maxSnapshotHeight <= mPictureSizes[i].height))){
+            maxSnapshotWidth = mPictureSizes[i].width;
+            maxSnapshotHeight = mPictureSizes[i].height;
+        }
+    }
+    if(myMode & CAMERA_ZSL_MODE){
+        // due to lack of PMEM we restrict to lower resolution
+        // NOTE(review): hard-coded 7 assumes zsl_picture_sizes has exactly
+        // 7 entries — confirm against the table definition.
+        mPictureSizesPtr = zsl_picture_sizes;
+        mSupportedPictureSizesCount = 7;
+    }else{
+        mPictureSizesPtr = mPictureSizes;
+        mSupportedPictureSizesCount = mPictureSizeCount;
+    }
+}
+
+// Formats 'len' entries of a size table as the comma-separated
+// "WxH,WxH,..." string used by CameraParameters size lists.
+static String8 create_sizes_str(const camera_size_type *sizes, int len) {
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        snprintf(buffer, sizeof(buffer), "%dx%d", sizes[0].width, sizes[0].height);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        snprintf(buffer, sizeof(buffer), ",%dx%d", sizes[i].width, sizes[i].height);
+        str.append(buffer);
+    }
+    return str;
+}
+
+// Joins the 'desc' strings of the first 'len' str_map entries into a
+// comma-separated list (the "supported values" format for a parameter key).
+String8 QCameraHardwareInterface::create_values_str(const str_map *values, int len) {
+    String8 str;
+
+    if (len > 0) {
+        str.append(values[0].desc);
+    }
+    for (int i = 1; i < len; i++) {
+        str.append(",");
+        str.append(values[i].desc);
+    }
+    return str;
+}
+
+// Formats 'len' FPS ranges as "(min,max),(min,max),..." — the value format
+// expected for KEY_SUPPORTED_PREVIEW_FPS_RANGE.
+static String8 create_fps_str(const android:: FPSRange* fps, int len) {
+    String8 str;
+    char buffer[32];
+
+    if (len > 0) {
+        snprintf(buffer, sizeof(buffer), "(%d,%d)", fps[0].minFPS, fps[0].maxFPS);
+        str.append(buffer);
+    }
+    for (int i = 1; i < len; i++) {
+        snprintf(buffer, sizeof(buffer), ",(%d,%d)", fps[i].minFPS, fps[i].maxFPS);
+        str.append(buffer);
+    }
+    return str;
+}
+
+// Builds the comma-separated integer list "min,min+1,...,max".
+// Returns an empty string when min > max.
+static String8 create_values_range_str(int min, int max){
+    String8 str;
+    char buffer[32];
+
+    if(min <= max){
+        snprintf(buffer, sizeof(buffer), "%d", min);
+        str.append(buffer);
+
+        for (int i = min + 1; i <= max; i++) {
+            snprintf(buffer, sizeof(buffer), ",%d", i);
+            str.append(buffer);
+        }
+    }
+    return str;
+}
+
+// Parses a "<width>x<height>" string (e.g. "640x480"). On success stores
+// the values through the reference parameters and returns 0; returns -1
+// when no 'x'/'X' separator follows the width. Characters after the height
+// are not validated.
+static int parse_size(const char *str, int &width, int &height)
+{
+    // Find the width.
+    char *end;
+    int w = (int)strtol(str, &end, 10);
+    // If an 'x' or 'X' does not immediately follow, give up.
+    if ( (*end != 'x') && (*end != 'X') )
+        return -1;
+
+    // Find the height, immediately after the 'x'.
+    int h = (int)strtol(end+1, 0, 10);
+
+    width = w;
+    height = h;
+
+    return 0;
+}
+
+bool QCameraHardwareInterface::isValidDimension(int width, int height) {
+    bool retVal = false;
+    /* This function checks if a given resolution is valid or not.
+     * A particular resolution is considered valid if it satisfies
+     * the following conditions:
+     * 1. width & height should be multiple of 16.
+     * 2. width & height should be less than/equal to the dimensions
+     *    supported by the camera sensor.
+     * 3. the aspect ratio is a valid aspect ratio and is among the
+     *    commonly used aspect ratio as determined by the thumbnail_sizes
+     *    data structure.
+     */
+
+    if( (width == CEILING16(width)) && (height == CEILING16(height))
+     && (width <= maxSnapshotWidth)
+    && (height <= maxSnapshotHeight) )
+    {
+        // Aspect ratio compared in Q12 fixed point against the ratios
+        // recorded in the thumbnail_sizes table.
+        uint32_t pictureAspectRatio = (uint32_t)((width * Q12)/height);
+        for(uint32_t i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ) {
+            if(thumbnail_sizes[i].aspect_ratio == pictureAspectRatio) {
+                retVal = true;
+                break;
+            }
+        }
+    }
+    return retVal;
+}
+
+// Determines autofocus support and caches the result in
+// mHasAutoFocusSupport (despite the "has" name this is a setter, not a
+// query). AF is force-disabled in ZSL mode; otherwise it reflects whether
+// the driver reports MM_CAMERA_OPS_FOCUS as supported.
+void QCameraHardwareInterface::hasAutoFocusSupport(){
+
+    ALOGV("%s",__func__);
+
+    if(isZSLMode()){
+        mHasAutoFocusSupport = false;
+        return;
+    }
+
+    if(cam_ops_is_op_supported (mCameraId, MM_CAMERA_OPS_FOCUS )) {
+        mHasAutoFocusSupport = true;
+    }
+    else {
+        ALOGV("AutoFocus is not supported");
+        mHasAutoFocusSupport = false;
+    }
+
+    ALOGV("%s:rc= %d",__func__, mHasAutoFocusSupport);
+
+}
+
+// Returns whether the driver supports automatic scene detection (ASD).
+bool QCameraHardwareInterface::supportsSceneDetection() {
+    bool rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_ASD_ENABLE);
+    return rc;
+}
+
+// Returns whether face detection can currently be enabled. FD is reported
+// unsupported while the driver is in video op-mode unless HDR is active;
+// otherwise the decision defers to the driver's MM_CAMERA_PARM_FD query.
+// NOTE(review): when cam_config_get_parm fails, the ALOGV below reads
+// op_mode uninitialized (log-only); the (ret == NO_ERROR) guard keeps the
+// actual decision path safe.
+bool QCameraHardwareInterface::supportsFaceDetection() {
+    bool rc;
+
+    status_t ret = NO_ERROR;
+    mm_camera_op_mode_type_t op_mode;
+
+    ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_OP_MODE, &op_mode);
+    if(ret != NO_ERROR){
+        ALOGE("%s: Failed to get Op Mode", __func__);
+    }
+
+    ALOGV("%s: OP_Mode is %d, ret=%d, mHdrMode=%d",__func__,op_mode,ret,mHdrMode);
+    if ((ret == NO_ERROR) && (op_mode == MM_CAMERA_OP_MODE_VIDEO) && (mHdrMode != HDR_MODE))
+    {
+        ALOGV("%s: Video mode : FD not supported",__func__);
+        return false;
+    }
+    else{
+        rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_FD);
+        ALOGV("%s: Still mode : FD supported : %d",__func__,rc);
+        return rc;
+    }
+}
+
+// Returns whether the driver supports selectable-zone AF (focus rectangle).
+bool QCameraHardwareInterface::supportsSelectableZoneAf() {
+    bool rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_FOCUS_RECT);
+    return rc;
+}
+
+// Returns whether the driver supports red-eye reduction.
+bool QCameraHardwareInterface::supportsRedEyeReduction() {
+    bool rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_REDEYE_REDUCTION);
+    return rc;
+}
+
+// Joins the first 'length' entries of an int16 array into a comma-separated
+// decimal string (used for the zoom-ratios parameter value).
+static String8 create_str(int16_t *arr, int length){
+    String8 str;
+    char buffer[32] = {0};
+
+    if(length > 0){
+        snprintf(buffer, sizeof(buffer), "%d", arr[0]);
+        str.append(buffer);
+    }
+
+    for (int i =1;i<length;i++){
+        snprintf(buffer, sizeof(buffer), ",%d",arr[i]);
+        str.append(buffer);
+    }
+    return str;
+}
+
+// Queries the sensor's maximum picture size and writes into *maxDim the
+// first (largest) mPictureSizes entry that fits within it. Returns the
+// driver status (NO_ERROR/0 on success) coerced to bool.
+// NOTE(review): 'ret' is declared bool but assigned a status code — any
+// non-zero driver error collapses to true(1) on return, losing the code.
+// The caller in initDefaultParameters() only tests '!= NO_ERROR', so this
+// works, but a status_t local would be cleaner.
+// NOTE(review): when no table entry fits, *maxDim is left untouched; the
+// caller zero-initializes it and falls back to defaults when it stays 0x0.
+bool QCameraHardwareInterface::getMaxPictureDimension(mm_camera_dimension_t *maxDim)
+{
+    bool ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_PICTURE_SIZE, &dim);
+    if (ret != NO_ERROR)
+        return ret;
+
+    /* Find the first dimension in the mPictureSizes
+     * array which is smaller than the max dimension.
+     * This will be the valid max picture resolution */
+    for (unsigned int i = 0; i < mPictureSizeCount; i++) {
+        if ((mPictureSizes[i].width <= dim.width) &&
+            (mPictureSizes[i].height <= dim.height)) {
+            maxDim->height = mPictureSizes[i].height;
+            maxDim->width = mPictureSizes[i].width;
+            break;
+        }
+    }
+    ALOGV("%s: Found Max Picture dimension: %d x %d", __func__,
+          maxDim->width, maxDim->height);
+    return ret;
+}
+// Loads the driver's default size tables (preview, video, thumbnail, HFR)
+// into the corresponding default_*_sizes arrays, first querying each
+// table's entry count.
+// NOTE(review): the 'ret' codes from the *_SIZES_CNT queries are stored but
+// never checked — a failed count query silently yields a stale/garbage
+// tbl_size for the follow-up table fetch.
+void QCameraHardwareInterface::loadTables()
+{
+
+    bool ret = NO_ERROR;
+    ALOGV("%s: E", __func__);
+
+    ret = cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_PREVIEW_SIZES_CNT, &preview_sizes_count);
+
+    default_sizes_tbl_t preview_sizes_tbl;
+    preview_sizes_tbl.tbl_size=preview_sizes_count;
+    preview_sizes_tbl.sizes_tbl=&default_preview_sizes[0];
+    if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_DEF_PREVIEW_SIZES, &preview_sizes_tbl)){
+        ALOGE("%s:Failed to get default preview sizes",__func__);
+    }
+    ret = cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_VIDEO_SIZES_CNT, &video_sizes_count);
+
+    default_sizes_tbl_t video_sizes_tbl;
+    video_sizes_tbl.tbl_size=video_sizes_count;
+    video_sizes_tbl.sizes_tbl=&default_video_sizes[0];
+    if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_DEF_VIDEO_SIZES, &video_sizes_tbl)){
+        ALOGE("%s:Failed to get default video sizes",__func__);
+    }
+
+    ret = cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_THUMB_SIZES_CNT, &thumbnail_sizes_count);
+
+    default_sizes_tbl_t thumbnail_sizes_tbl;
+    thumbnail_sizes_tbl.tbl_size=thumbnail_sizes_count;
+    thumbnail_sizes_tbl.sizes_tbl=&default_thumbnail_sizes[0];
+    if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_DEF_THUMB_SIZES, &thumbnail_sizes_tbl)){
+        ALOGE("%s:Failed to get default thumbnail sizes",__func__);
+    }
+
+    ret = cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_HFR_SIZES_CNT, &hfr_sizes_count);
+
+    default_sizes_tbl_t hfr_sizes_tbl;
+    hfr_sizes_tbl.tbl_size=hfr_sizes_count;
+    hfr_sizes_tbl.sizes_tbl=&default_hfr_sizes[0];
+    if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+            MM_CAMERA_PARM_DEF_HFR_SIZES, &hfr_sizes_tbl)){
+        ALOGE("%s:Failed to get default HFR sizes",__func__);
+    }
+    ALOGV("%s: X", __func__);
+}
+void QCameraHardwareInterface::initDefaultParameters()
+{
+ bool ret;
+ char prop[PROPERTY_VALUE_MAX];
+ mm_camera_dimension_t maxDim;
+ int rc = MM_CAMERA_OK;
+ ALOGV("%s: E", __func__);
+
+ memset(&maxDim, 0, sizeof(mm_camera_dimension_t));
+ ret = getMaxPictureDimension(&maxDim);
+
+ if (ret != NO_ERROR) {
+ ALOGE("%s: Cannot get Max picture size supported", __func__);
+ return;
+ }
+ if (!maxDim.width || !maxDim.height) {
+ maxDim.width = DEFAULT_LIVESHOT_WIDTH;
+ maxDim.height = DEFAULT_LIVESHOT_HEIGHT;
+ }
+
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.camera.snap.format", prop, "0");
+ mSnapshotFormat = atoi(prop);
+ ALOGV("%s: prop =(%s), snap_format=%d", __func__, prop, mSnapshotFormat);
+
+ //cam_ctrl_dimension_t dim;
+ mHFRLevel = 0;
+ memset(&mDimension, 0, sizeof(cam_ctrl_dimension_t));
+ memset(&mPreviewFormatInfo, 0, sizeof(preview_format_info_t));
+ mDimension.video_width = DEFAULT_VIDEO_WIDTH;
+ mDimension.video_height = DEFAULT_VIDEO_HEIGHT;
+ // mzhu mDimension.picture_width = DEFAULT_STREAM_WIDTH;
+ // mzhu mDimension.picture_height = DEFAULT_STREAM_HEIGHT;
+ mDimension.picture_width = maxDim.width;
+ mDimension.picture_height = maxDim.height;
+ mDimension.display_width = DEFAULT_STREAM_WIDTH;
+ mDimension.display_height = DEFAULT_STREAM_HEIGHT;
+ mDimension.orig_picture_dx = mDimension.picture_width;
+ mDimension.orig_picture_dy = mDimension.picture_height;
+ mDimension.ui_thumbnail_width = DEFAULT_STREAM_WIDTH;
+ mDimension.ui_thumbnail_height = DEFAULT_STREAM_HEIGHT;
+ mDimension.orig_video_width = DEFAULT_STREAM_WIDTH;
+ mDimension.orig_video_height = DEFAULT_STREAM_HEIGHT;
+
+ mDimension.prev_format = CAMERA_YUV_420_NV21;
+ mDimension.enc_format = CAMERA_YUV_420_NV12;
+ if (mSnapshotFormat == 1) {
+ mDimension.main_img_format = CAMERA_YUV_422_NV61;
+ } else {
+ mDimension.main_img_format = CAMERA_YUV_420_NV21;
+ }
+ mDimension.thumb_format = CAMERA_YUV_420_NV21;
+ ALOGV("%s: main_img_format =%d, thumb_format=%d", __func__,
+ mDimension.main_img_format, mDimension.thumb_format);
+ mDimension.prev_padding_format = CAMERA_PAD_TO_WORD;
+
+ ret = native_set_parms(MM_CAMERA_PARM_DIMENSION,
+ sizeof(cam_ctrl_dimension_t), (void *) &mDimension);
+ if(!ret) {
+ ALOGE("MM_CAMERA_PARM_DIMENSION Failed.");
+ return;
+ }
+
+ hasAutoFocusSupport();
+
+ // Initialize constant parameter strings. This will happen only once in the
+ // lifetime of the mediaserver process.
+ if (true/*!mParamStringInitialized*/) {
+ //filter picture sizes
+ filterPictureSizes();
+ mPictureSizeValues = create_sizes_str(
+ mPictureSizesPtr, mSupportedPictureSizesCount);
+ mPreviewSizeValues = create_sizes_str(
+ mPreviewSizes, mPreviewSizeCount);
+ mVideoSizeValues = create_sizes_str(
+ mVideoSizes, mVideoSizeCount);
+
+ //Query for max HFR value
+ camera_hfr_mode_t maxHFR;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_MAX_HFR_MODE, (void *)&maxHFR);
+ //Filter HFR values and build parameter string
+ String8 str;
+ for(int i=0; i<HFR_VALUES_COUNT; i++){
+ if(hfr[i].val <= maxHFR){
+ if(i>0) str.append(",");
+ str.append(hfr[i].desc);
+ }
+ }
+ mHfrValues = str;
+ mHfrSizeValues = create_sizes_str(
+ default_hfr_sizes, hfr_sizes_count);
+ mFpsRangesSupportedValues = create_fps_str(
+ FpsRangesSupported,FPS_RANGES_SUPPORTED_COUNT );
+ mParameters.set(
+ QCameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ mFpsRangesSupportedValues);
+ mParameters.setPreviewFpsRange(MINIMUM_FPS*1000,MAXIMUM_FPS*1000);
+ mFlashValues = create_values_str(
+ flash, sizeof(flash) / sizeof(str_map));
+ mLensShadeValues = create_values_str(
+ lensshade,sizeof(lensshade)/sizeof(str_map));
+ mMceValues = create_values_str(
+ mce,sizeof(mce)/sizeof(str_map));
+ mEffectValues = create_values_str(effects, sizeof(effects) / sizeof(str_map));
+ mAntibandingValues = create_values_str(
+ antibanding, sizeof(antibanding) / sizeof(str_map));
+ mIsoValues = create_values_str(iso,sizeof(iso)/sizeof(str_map));
+ mAutoExposureValues = create_values_str(
+ autoexposure, sizeof(autoexposure) / sizeof(str_map));
+ mWhitebalanceValues = create_values_str(
+ whitebalance, sizeof(whitebalance) / sizeof(str_map));
+
+ if(mHasAutoFocusSupport){
+ mFocusModeValues = create_values_str(
+ focus_modes, sizeof(focus_modes) / sizeof(str_map));
+ }
+
+ mSceneModeValues = create_values_str(scenemode, sizeof(scenemode) / sizeof(str_map));
+
+ if(mHasAutoFocusSupport){
+ mTouchAfAecValues = create_values_str(
+ touchafaec,sizeof(touchafaec)/sizeof(str_map));
+ }
+ //Currently Enabling Histogram for 8x60
+ mHistogramValues = create_values_str(
+ histogram,sizeof(histogram)/sizeof(str_map));
+
+ mSkinToneEnhancementValues = create_values_str(
+ skinToneEnhancement,sizeof(skinToneEnhancement)/sizeof(str_map));
+
+ mPictureFormatValues = create_values_str(
+ picture_formats, sizeof(picture_formats)/sizeof(str_map));
+
+ mZoomSupported=false;
+ mMaxZoom=0;
+ mm_camera_zoom_tbl_t zmt;
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_MAXZOOM, &mMaxZoom)){
+ ALOGE("%s:Failed to get max zoom",__func__);
+ }else{
+
+ ALOGV("Max Zoom:%d",mMaxZoom);
+        /* Kernel driver limits the max amount of data that can be retrieved through a control
+ command to 260 bytes hence we conservatively limit to 110 zoom ratios */
+ if(mMaxZoom>MAX_ZOOM_RATIOS) {
+ ALOGV("%s:max zoom is larger than sizeof zoomRatios table",__func__);
+ mMaxZoom=MAX_ZOOM_RATIOS-1;
+ }
+ zmt.size=mMaxZoom;
+ zmt.zoom_ratio_tbl=&zoomRatios[0];
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_ZOOM_RATIO, &zmt)){
+ ALOGE("%s:Failed to get max zoom ratios",__func__);
+ }else{
+ mZoomSupported=true;
+ mZoomRatioValues = create_str(zoomRatios, mMaxZoom);
+ }
+ }
+
+ ALOGV("Zoom supported:%d",mZoomSupported);
+
+ denoise_value = create_values_str(
+ denoise, sizeof(denoise) / sizeof(str_map));
+
+ if(supportsFaceDetection()) {
+ mFaceDetectionValues = create_values_str(
+ facedetection, sizeof(facedetection) / sizeof(str_map));
+ }
+
+ if(mHasAutoFocusSupport){
+ mSelectableZoneAfValues = create_values_str(
+ selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map));
+ }
+
+ mSceneDetectValues = create_values_str(scenedetect, sizeof(scenedetect) / sizeof(str_map));
+
+ mRedeyeReductionValues = create_values_str(
+ redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map));
+
+ mZslValues = create_values_str(
+ zsl_modes,sizeof(zsl_modes)/sizeof(str_map));
+
+ mParamStringInitialized = true;
+ }
+
+ //set supported video sizes
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_VIDEO_SIZES, mVideoSizeValues.string());
+
+ //set default video size to first one in supported table
+ String8 vSize = create_sizes_str(&mVideoSizes[0], 1);
+ mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, vSize.string());
+
+ //Set Preview size
+ int default_preview_width, default_preview_height;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH,
+ &default_preview_width);
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT,
+ &default_preview_height);
+ mParameters.setPreviewSize(default_preview_width, default_preview_height);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
+ mPreviewSizeValues.string());
+ mDimension.display_width = default_preview_width;
+ mDimension.display_height = default_preview_height;
+
+ //Set Preview Frame Rate
+ if(mFps >= MINIMUM_FPS && mFps <= MAXIMUM_FPS) {
+ mPreviewFrameRateValues = create_values_range_str(
+ MINIMUM_FPS, mFps);
+ }else{
+ mPreviewFrameRateValues = create_values_range_str(
+ MINIMUM_FPS, MAXIMUM_FPS);
+ }
+
+
+ if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS)) {
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ mPreviewFrameRateValues.string());
+ } else {
+ mParameters.set(
+ QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ DEFAULT_FIXED_FPS);
+ }
+
+ //Set Preview Frame Rate Modes
+ mParameters.setPreviewFrameRateMode("frame-rate-auto");
+ mFrameRateModeValues = create_values_str(
+ frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map));
+ if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS_MODE)){
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES,
+ mFrameRateModeValues.string());
+ }
+
+ //Set Preview Format
+ //mParameters.setPreviewFormat("yuv420sp"); // informative
+ mParameters.setPreviewFormat(QCameraParameters::PIXEL_FORMAT_YUV420SP);
+
+ mPreviewFormatValues = create_values_str(
+ preview_formats, sizeof(preview_formats) / sizeof(str_map));
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+ mPreviewFormatValues.string());
+
+ //Set Overlay Format
+ mParameters.set("overlay-format", HAL_PIXEL_FORMAT_YCbCr_420_SP);
+ mParameters.set("max-num-detected-faces-hw", "2");
+
+ // Set supported max faces
+ int maxNumFaces = 0;
+ if (supportsFaceDetection()) {
+ //Query the maximum number of faces supported by hardware.
+ if(MM_CAMERA_OK != cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_MAX_NUM_FACES_DECT, &maxNumFaces)){
+ ALOGE("%s:Failed to get max number of faces supported",__func__);
+ }
+ }
+ mParameters.set(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW, maxNumFaces);
+    //This parameter is set to default here. This will be changed by application
+ //if it needs to support specific number of faces. See also setParameters.
+ mParameters.set(QCameraParameters::KEY_MAX_NUM_REQUESTED_FACES, 2);
+
+ // Set camera features supported flag
+ int32_t featureFlag = 0;
+ if (supportsFaceDetection()) {
+        featureFlag |= 0x00000001; // bit 0 indicates facial feature
+ }
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_CAMERA_FEATURES, featureFlag);
+
+ //Set Picture Size
+ mParameters.setPictureSize(DEFAULT_PICTURE_WIDTH, DEFAULT_PICTURE_HEIGHT);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ mPictureSizeValues.string());
+
+ //Set Preview Frame Rate
+ if(mFps >= MINIMUM_FPS && mFps <= MAXIMUM_FPS) {
+ mParameters.setPreviewFrameRate(mFps);
+ }else{
+ mParameters.setPreviewFrameRate(DEFAULT_FPS);
+ }
+
+ //Set Picture Format
+ mParameters.setPictureFormat("jpeg"); // informative
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
+ mPictureFormatValues);
+
+ mParameters.set(QCameraParameters::KEY_JPEG_QUALITY, "90"); // max quality
+ mJpegQuality = 90;
+ //Set Video Format
+ mParameters.set(QCameraParameters::KEY_VIDEO_FRAME_FORMAT, "yuv420sp");
+
+ //Set Thumbnail parameters
+ mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
+ THUMBNAIL_WIDTH_STR); // informative
+ mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,
+ THUMBNAIL_HEIGHT_STR); // informative
+ mDimension.ui_thumbnail_width =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+ mDimension.ui_thumbnail_height =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+ mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "90");
+ String8 valuesStr = create_sizes_str(default_thumbnail_sizes, thumbnail_sizes_count);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES,
+ valuesStr.string());
+ // Define CAMERA_SMOOTH_ZOOM in Android.mk file , to enable smoothzoom
+#ifdef CAMERA_SMOOTH_ZOOM
+ mParameters.set(QCameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true");
+#endif
+ if(mZoomSupported){
+ mParameters.set(QCameraParameters::KEY_ZOOM_SUPPORTED, "true");
+ ALOGV("max zoom is %d", mMaxZoom-1);
+        /* mMaxZoom value that the query interface returns is the size
+         * of zoom table. So the actual max zoom value will be one
+         * less than that value. */
+
+ mParameters.set("max-zoom",mMaxZoom-1);
+ mParameters.set(QCameraParameters::KEY_ZOOM_RATIOS,
+ mZoomRatioValues);
+ } else
+ {
+ mParameters.set(QCameraParameters::KEY_ZOOM_SUPPORTED, "false");
+ }
+
+ /* Enable zoom support for video application if VPE enabled */
+ if(mZoomSupported) {
+ mParameters.set("video-zoom-support", "true");
+ } else {
+ mParameters.set("video-zoom-support", "false");
+ }
+
+ //8960 supports Power modes : Low power, Normal Power.
+ mParameters.set("power-mode-supported", "true");
+
+ //Set Live shot support
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LIVESHOT_MAIN);
+ if(!rc) {
+ ALOGV("%s:LIVESHOT is not supported", __func__);
+ mParameters.set("video-snapshot-supported", "false");
+ } else {
+ mParameters.set("video-snapshot-supported", "true");
+ }
+
+
+ //Set default power mode
+ mParameters.set(QCameraParameters::KEY_POWER_MODE,"Low_Power");
+ //Set Wnr on
+ mParameters.set(QCameraParameters::KEY_DENOISE,true);
+ //Set Camera Mode
+ mParameters.set(QCameraParameters::KEY_CAMERA_MODE,1);
+ mParameters.set(QCameraParameters::KEY_AE_BRACKET_HDR,"Off");
+
+ //Set Antibanding
+ mParameters.set(QCameraParameters::KEY_ANTIBANDING,
+ QCameraParameters::ANTIBANDING_AUTO);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_ANTIBANDING,
+ mAntibandingValues);
+
+ //Set Effect
+ mParameters.set(QCameraParameters::KEY_EFFECT,
+ QCameraParameters::EFFECT_NONE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_EFFECTS, mEffectValues);
+
+ //Set Auto Exposure
+ mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE,
+ QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_AUTO_EXPOSURE, mAutoExposureValues);
+
+ //Set WhiteBalance
+ mParameters.set(QCameraParameters::KEY_WHITE_BALANCE,
+ QCameraParameters::WHITE_BALANCE_AUTO);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_WHITE_BALANCE,mWhitebalanceValues);
+
+ //Set AEC_LOCK
+ mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK, "false");
+ if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_AEC_LOCK)){
+ mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "true");
+ } else {
+ mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK_SUPPORTED, "false");
+ }
+ //Set AWB_LOCK
+ mParameters.set(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, "false");
+ if(cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_AWB_LOCK))
+ mParameters.set(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "true");
+ else
+ mParameters.set(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK_SUPPORTED, "false");
+
+ //Set Focus Mode
+ if(mHasAutoFocusSupport){
+ mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+ QCameraParameters::FOCUS_MODE_AUTO);
+ mFocusMode = AF_MODE_AUTO;
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+ mFocusModeValues);
+ mParameters.set(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "1");
+ mParameters.set(QCameraParameters::KEY_MAX_NUM_METERING_AREAS, "1");
+ } else {
+ mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+ QCameraParameters::FOCUS_MODE_INFINITY);
+ mFocusMode = DONT_CARE;
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+ QCameraParameters::FOCUS_MODE_INFINITY);
+ mParameters.set(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "0");
+ mParameters.set(QCameraParameters::KEY_MAX_NUM_METERING_AREAS, "0");
+ }
+
+ mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, DEFAULT_CAMERA_AREA);
+ mParameters.set(QCameraParameters::KEY_METERING_AREAS, DEFAULT_CAMERA_AREA);
+
+ //Set Flash
+ if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LED_MODE)) {
+ mParameters.set(QCameraParameters::KEY_FLASH_MODE,
+ QCameraParameters::FLASH_MODE_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FLASH_MODES,
+ mFlashValues);
+ }
+
+ //Set Sharpness
+ mParameters.set(QCameraParameters::KEY_MAX_SHARPNESS,
+ CAMERA_MAX_SHARPNESS);
+ mParameters.set(QCameraParameters::KEY_SHARPNESS,
+ CAMERA_DEF_SHARPNESS);
+
+ //Set Contrast
+ mParameters.set(QCameraParameters::KEY_MAX_CONTRAST,
+ CAMERA_MAX_CONTRAST);
+ mParameters.set(QCameraParameters::KEY_CONTRAST,
+ CAMERA_DEF_CONTRAST);
+
+ //Set Saturation
+ mParameters.set(QCameraParameters::KEY_MAX_SATURATION,
+ CAMERA_MAX_SATURATION);
+ mParameters.set(QCameraParameters::KEY_SATURATION,
+ CAMERA_DEF_SATURATION);
+
+    //Set Brightness/luma-adaptation
+ mParameters.set("luma-adaptation", "3");
+
+ mParameters.set(QCameraParameters::KEY_PICTURE_FORMAT,
+ QCameraParameters::PIXEL_FORMAT_JPEG);
+
+ //Set Lensshading
+ mParameters.set(QCameraParameters::KEY_LENSSHADE,
+ QCameraParameters::LENSSHADE_ENABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_LENSSHADE_MODES,
+ mLensShadeValues);
+
+ //Set ISO Mode
+ mParameters.set(QCameraParameters::KEY_ISO_MODE,
+ QCameraParameters::ISO_AUTO);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_ISO_MODES,
+ mIsoValues);
+
+ //Set MCE
+ mParameters.set(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT,
+ QCameraParameters::MCE_ENABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES,
+ mMceValues);
+ //Set HFR
+ if (cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_HFR)) {
+ mParameters.set(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE,
+ QCameraParameters::VIDEO_HFR_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_HFR_SIZES,
+ mHfrSizeValues.string());
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES,
+ mHfrValues);
+ } else{
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_HFR_SIZES,"");
+ }
+
+ //Set Histogram
+ mParameters.set(QCameraParameters::KEY_HISTOGRAM,
+ QCameraParameters::HISTOGRAM_DISABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_HISTOGRAM_MODES,
+ mHistogramValues);
+
+ //Set SkinTone Enhancement
+ mParameters.set(QCameraParameters::KEY_SKIN_TONE_ENHANCEMENT,
+ QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE);
+ mParameters.set("skinToneEnhancement", "0");
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES,
+ mSkinToneEnhancementValues);
+
+ //Set Scene Mode
+ mParameters.set(QCameraParameters::KEY_SCENE_MODE,
+ QCameraParameters::SCENE_MODE_AUTO);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_SCENE_MODES,
+ mSceneModeValues);
+
+ //Set Streaming Textures
+ mParameters.set("strtextures", "OFF");
+
+ //Set Denoise
+ mParameters.set(QCameraParameters::KEY_DENOISE,
+ QCameraParameters::DENOISE_ON);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_DENOISE,
+ denoise_value);
+ //Set Touch AF/AEC
+ mParameters.set(QCameraParameters::KEY_TOUCH_AF_AEC,
+ QCameraParameters::TOUCH_AF_AEC_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_TOUCH_AF_AEC,
+ mTouchAfAecValues);
+ /* touch-AF ROI for reducing af fail case */
+ mParameters.set("touchAfAec-dx","200");
+ mParameters.set("touchAfAec-dy","200");
+
+ //Set Scene Detection
+ mParameters.set(QCameraParameters::KEY_SCENE_DETECT,
+ QCameraParameters::SCENE_DETECT_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_SCENE_DETECT,
+ mSceneDetectValues);
+
+ //Set Selectable Zone AF
+ mParameters.set(QCameraParameters::KEY_SELECTABLE_ZONE_AF,
+ QCameraParameters::SELECTABLE_ZONE_AF_AUTO);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_SELECTABLE_ZONE_AF,
+ mSelectableZoneAfValues);
+
+ //Set Face Detection
+ if(supportsFaceDetection()){
+ mParameters.set(QCameraParameters::KEY_FACE_DETECTION,
+ QCameraParameters::FACE_DETECTION_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FACE_DETECTION,
+ mFaceDetectionValues);
+ }
+
+ //Set Red Eye Reduction
+ mParameters.set(QCameraParameters::KEY_REDEYE_REDUCTION,
+ QCameraParameters::REDEYE_REDUCTION_DISABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_REDEYE_REDUCTION,
+ mRedeyeReductionValues);
+
+ //Set ZSL
+ mParameters.set(QCameraParameters::KEY_ZSL,
+ QCameraParameters::ZSL_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_ZSL_MODES,
+ mZslValues);
+
+ //Set Focal length, horizontal and vertical view angles
+ focus_distances_info_t focalLength;
+ float horizontalViewAngle = 0.0f;
+ float verticalViewAngle = 0.0f;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_FOCAL_LENGTH,
+ (void *)&focalLength);
+ mParameters.setFloat(QCameraParameters::KEY_FOCAL_LENGTH,
+ focalLength.focus_distance[0]);
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+ (void *)&horizontalViewAngle);
+ mParameters.setFloat(QCameraParameters::KEY_HORIZONTAL_VIEW_ANGLE,
+ horizontalViewAngle);
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE,
+ (void *)&verticalViewAngle);
+ mParameters.setFloat(QCameraParameters::KEY_VERTICAL_VIEW_ANGLE,
+ verticalViewAngle);
+
+ //Set Exposure Compensation
+ mParameters.set(
+ QCameraParameters::KEY_MAX_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR);
+ mParameters.set(
+ QCameraParameters::KEY_MIN_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR);
+ mParameters.set(
+ QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR);
+ mParameters.setFloat(
+ QCameraParameters::KEY_EXPOSURE_COMPENSATION_STEP,
+ EXPOSURE_COMPENSATION_STEP);
+
+ mParameters.set("num-snaps-per-shutter", 1);
+
+ mParameters.set("capture-burst-captures-values", getZSLQueueDepth());
+ mParameters.set("capture-burst-interval-supported", "true");
+ mParameters.set("capture-burst-interval-max", BURST_INTREVAL_MAX); /*skip frames*/
+ mParameters.set("capture-burst-interval-min", BURST_INTREVAL_MIN); /*skip frames*/
+ mParameters.set("capture-burst-interval", BURST_INTREVAL_DEFAULT); /*skip frames*/
+ mParameters.set("capture-burst-retroactive", 0);
+ mParameters.set("capture-burst-retroactive-max", getZSLQueueDepth());
+ mParameters.set("capture-burst-exposures", "");
+ mParameters.set("capture-burst-exposures-values",
+ "-12,-11,-10,-9,-8,-7,-6,-5,-4,-3,-2,-1,0,1,2,3,4,5,6,7,8,9,10,11,12");
+ {
+ String8 CamModeStr;
+ char buffer[32];
+ int flag = 0;
+
+ for (int i = 0; i < HAL_CAM_MODE_MAX; i++) {
+ if ( 0 ) { /*exclude some conflicting case*/
+ } else {
+ if (flag == 0) { /*first item*/
+ snprintf(buffer, sizeof(buffer), "%d", i);
+ } else {
+ snprintf(buffer, sizeof(buffer), ",%d", i);
+ }
+ flag = 1;
+ CamModeStr.append(buffer);
+ }
+ }
+ mParameters.set("camera-mode-values", CamModeStr);
+ }
+
+ mParameters.set("ae-bracket-hdr-values",
+ create_values_str(hdr_bracket, sizeof(hdr_bracket)/sizeof(str_map) ));
+
+// if(mIs3DModeOn)
+// mParameters.set("3d-frame-format", "left-right");
+ mParameters.set("no-display-mode", 0);
+ //mUseOverlay = useOverlay();
+ mParameters.set("zoom", 0);
+
+ int mNuberOfVFEOutputs;
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, &mNuberOfVFEOutputs);
+ if(ret != MM_CAMERA_OK) {
+ ALOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE failed");
+ ret = BAD_VALUE;
+ }
+ if(mNuberOfVFEOutputs == 1)
+ {
+ mParameters.set(QCameraParameters::KEY_SINGLE_ISP_OUTPUT_ENABLED, "true");
+ } else {
+ mParameters.set(QCameraParameters::KEY_SINGLE_ISP_OUTPUT_ENABLED, "false");
+ }
+
+ if (setParameters(mParameters) != NO_ERROR) {
+ ALOGE("Failed to set default parameters?!");
+ }
+
+ mNoDisplayMode = 0;
+ mLedStatusForZsl = LED_MODE_OFF;
+
+ mInitialized = true;
+ strTexturesOn = false;
+
+ ALOGV("%s: X", __func__);
+ return;
+}
+
+/**
+ * Set the camera parameters from a flattened parameter string.
+ * Unflattens the string into a QCameraParameters object and delegates to
+ * setParameters(const QCameraParameters&). Returns 0 when that call
+ * returns NO_ERROR, -1 otherwise (the delegate returns BAD_VALUE for
+ * invalid or unsupported parameters).
+ */
+
+int QCameraHardwareInterface::setParameters(const char *parms)
+{
+    QCameraParameters param;
+    String8 str = String8(parms);
+    param.unflatten(str);
+    status_t ret = setParameters(param);
+    if(ret == NO_ERROR)
+        return 0;
+    else
+        return -1;
+}
+
+/**
+ * Set the camera parameters. This returns BAD_VALUE if any parameter is
+ * invalid or not supported. */
+status_t QCameraHardwareInterface::setParameters(const QCameraParameters& params)
+{
+ // NOTE(review): `ret` is never used; the function returns final_rc.
+ status_t ret = NO_ERROR;
+
+ ALOGV("%s: E", __func__);
+// Mutex::Autolock l(&mLock);
+ // Best-effort semantics: every individual setter is attempted, and the
+ // last non-zero status is remembered and returned at the end.
+ status_t rc, final_rc = NO_ERROR;
+
+ if ((rc = setPowerMode(params))) final_rc = rc;
+ if ((rc = setPreviewSize(params))) final_rc = rc;
+ if ((rc = setVideoSize(params))) final_rc = rc;
+ if ((rc = setPictureSize(params))) final_rc = rc;
+ if ((rc = setJpegThumbnailSize(params))) final_rc = rc;
+ if ((rc = setJpegQuality(params))) final_rc = rc;
+ if ((rc = setEffect(params))) final_rc = rc;
+ if ((rc = setGpsLocation(params))) final_rc = rc;
+ if ((rc = setRotation(params))) final_rc = rc;
+ if ((rc = setZoom(params))) final_rc = rc;
+ if ((rc = setOrientation(params))) final_rc = rc;
+ if ((rc = setLensshadeValue(params))) final_rc = rc;
+ if ((rc = setMCEValue(params))) final_rc = rc;
+ if ((rc = setPictureFormat(params))) final_rc = rc;
+ if ((rc = setSharpness(params))) final_rc = rc;
+ if ((rc = setSaturation(params))) final_rc = rc;
+ if ((rc = setSceneMode(params))) final_rc = rc;
+ if ((rc = setContrast(params))) final_rc = rc;
+// if ((rc = setFaceDetect(params))) final_rc = rc;
+ if ((rc = setStrTextures(params))) final_rc = rc;
+ if ((rc = setPreviewFormat(params))) final_rc = rc;
+ if ((rc = setSkinToneEnhancement(params))) final_rc = rc;
+ if ((rc = setWaveletDenoise(params))) final_rc = rc;
+ if ((rc = setAntibanding(params))) final_rc = rc;
+ // if ((rc = setOverlayFormats(params))) final_rc = rc;
+ if ((rc = setRedeyeReduction(params))) final_rc = rc;
+ if ((rc = setCaptureBurstExp())) final_rc = rc;
+
+ // Burst exposures: prefer the app-supplied key; fall back to the
+ // persist.capture.burst.exposures system property when absent/empty.
+ const char *str_val = params.get("capture-burst-exposures");
+ if ( str_val == NULL || strlen(str_val)==0 ) {
+ char burst_exp[PROPERTY_VALUE_MAX];
+ memset(burst_exp, 0, sizeof(burst_exp));
+ property_get("persist.capture.burst.exposures", burst_exp, "");
+ if ( strlen(burst_exp)>0 ) {
+ mParameters.set("capture-burst-exposures", burst_exp);
+ }
+ } else {
+ mParameters.set("capture-burst-exposures", str_val);
+ }
+ mParameters.set("num-snaps-per-shutter", params.get("num-snaps-per-shutter"));
+
+ if ((rc = setAEBracket(params))) final_rc = rc;
+ // if ((rc = setDenoise(params))) final_rc = rc;
+ if ((rc = setPreviewFpsRange(params))) final_rc = rc;
+ if((rc = setRecordingHint(params))) final_rc = rc;
+ if ((rc = setNumOfSnapshot(params))) final_rc = rc;
+ if ((rc = setAecAwbLock(params))) final_rc = rc;
+ if ((rc = setWhiteBalance(params))) final_rc = rc;
+ // The settings below are only meaningful when no bestshot (scene)
+ // mode is active, since scene modes override them in the driver.
+ const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+ int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+
+ if((value != NOT_FOUND) && (value == CAMERA_BESTSHOT_OFF )) {
+ //if ((rc = setPreviewFrameRateMode(params))) final_rc = rc;
+ if ((rc = setPreviewFrameRate(params))) final_rc = rc;
+ if ((rc = setBrightness(params))) final_rc = rc;
+ if ((rc = setISOValue(params))) final_rc = rc;
+ if ((rc = setFocusAreas(params))) final_rc = rc;
+ if ((rc = setMeteringAreas(params))) final_rc = rc;
+ }
+ if ((rc = setFocusMode(params))) final_rc = rc;
+ if ((rc = setAutoExposure(params))) final_rc = rc;
+ if ((rc = setExposureCompensation(params))) final_rc = rc;
+ if ((rc = setFlash(params))) final_rc = rc;
+ //selectableZoneAF needs to be invoked after continuous AF
+ if ((rc = setSelectableZoneAf(params))) final_rc = rc;
+ // setHighFrameRate needs to be done at end, as there can
+ // be a preview restart, and need to use the updated parameters
+ if ((rc = setHighFrameRate(params))) final_rc = rc;
+ if ((rc = setZSLBurstLookBack(params))) final_rc = rc;
+ if ((rc = setZSLBurstInterval(params))) final_rc = rc;
+ if ((rc = setNoDisplayMode(params))) final_rc = rc;
+
+ //Update Exiftag values.
+ setExifTags();
+
+ ALOGV("%s: X", __func__);
+ return final_rc;
+}
+
+/** Retrieve the camera parameters. The buffer returned by the camera HAL
+ must be returned back to it with put_parameters, if put_parameters
+ is not NULL.
+ */
+/* Flatten the current parameters into a heap-allocated,
+   NUL-terminated C string for the HAL get_parameters hook. The buffer
+   must be returned via putParameters() (which free()s it). Always
+   returns 0; on allocation failure *parms is left untouched. */
+int QCameraHardwareInterface::getParameters(char **parms)
+{
+    QCameraParameters snapshot = getParameters();
+    //snapshot.dump();
+    String8 flat = snapshot.flatten( );
+    size_t len = flat.length();
+    char *buf = (char *)malloc(len + 1);
+    if (buf != NULL) {
+        memcpy(buf, flat.string(), len);
+        buf[len] = '\0';
+        *parms = buf;
+    }
+    return 0;
+}
+
+/** The camera HAL uses its own memory to pass us the parameters when we
+ call get_parameters. Use this function to return the memory back to
+ the camera HAL, if put_parameters is not NULL. If put_parameters
+ is NULL, then you have to use free() to release the memory.
+*/
+/** Release a buffer previously handed out by getParameters(). The HAL
+    allocates it with malloc(), so free() is the matching deallocator.
+    (The original also assigned NULL to the by-value parameter — a dead
+    store with no effect on the caller; removed.) */
+void QCameraHardwareInterface::putParameters(char *rc)
+{
+    free(rc);
+}
+
+/** Return the live parameter set under mLock, refreshing the focus
+    distances and — when AF is supported and the scene mode is not
+    "auto" — forcing continuous-picture focus mode. */
+QCameraParameters& QCameraHardwareInterface::getParameters()
+{
+    Mutex::Autolock lock(mLock);
+    mParameters.set(QCameraParameters::KEY_FOCUS_DISTANCES, mFocusDistance.string());
+    const char *str = mParameters.get(QCameraParameters::KEY_SCENE_MODE);
+    // Bug fix: guard against a missing scene-mode key — the original
+    // passed a possibly-NULL pointer straight into strcmp().
+    if (mHasAutoFocusSupport && str != NULL && strcmp(str, "auto")) {
+        mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                        QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE);
+    }
+    return mParameters;
+}
+
+// Push the current KEY_FACE_DETECTION setting (on/off per the
+// facedetection lookup table) to the lower layer, together with the
+// app-requested maximum number of faces.
+status_t QCameraHardwareInterface::runFaceDetection()
+{
+ bool ret = true;
+
+ const char *str = mParameters.get(QCameraParameters::KEY_FACE_DETECTION);
+ if (str != NULL) {
+ int value = attr_lookup(facedetection,
+ sizeof(facedetection) / sizeof(str_map), str);
+ fd_set_parm_t fd_set_parm;
+ int requested_faces = mParameters.getInt(QCameraParameters::KEY_MAX_NUM_REQUESTED_FACES);
+ fd_set_parm.fd_mode = value;
+ fd_set_parm.num_fd = requested_faces;
+ ret = native_set_parms(MM_CAMERA_PARM_FD, sizeof(fd_set_parm_t), (void *)&fd_set_parm);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ // Only reachable when the key is absent (str == NULL).
+ ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+/** Apply KEY_SHARPNESS. Values outside [CAMERA_MIN_SHARPNESS,
+    CAMERA_MAX_SHARPNESS] are rejected; sensors without sharpness
+    support are treated as success (no-op). */
+status_t QCameraHardwareInterface::setSharpness(const QCameraParameters& params)
+{
+    bool ret = false;
+    int rc = MM_CAMERA_OK;
+    ALOGV("%s",__func__);
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SHARPNESS);
+    if(!rc) {
+        // Bug fix: the log said "CONTRAST not supported" — a copy/paste
+        // leftover from setContrast().
+        ALOGV("%s:SHARPNESS not supported", __func__);
+        return NO_ERROR;
+    }
+    int sharpness = params.getInt(QCameraParameters::KEY_SHARPNESS);
+    if((sharpness < CAMERA_MIN_SHARPNESS
+            || sharpness > CAMERA_MAX_SHARPNESS))
+        return UNKNOWN_ERROR;
+
+    ALOGV("setting sharpness %d", sharpness);
+    mParameters.set(QCameraParameters::KEY_SHARPNESS, sharpness);
+    ret = native_set_parms(MM_CAMERA_PARM_SHARPNESS, sizeof(sharpness),
+                           (void *)&sharpness);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+// Apply KEY_SATURATION, range-checked against CAMERA_MIN/MAX_SATURATION.
+// Sensors without saturation support are treated as success (no-op).
+status_t QCameraHardwareInterface::setSaturation(const QCameraParameters& params)
+{
+ bool ret = false;
+ int rc = MM_CAMERA_OK;
+ ALOGV("%s",__func__);
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SATURATION);
+ if(!rc) {
+ ALOGV("%s:MM_CAMERA_PARM_SATURATION not supported", __func__);
+ return NO_ERROR;
+ }
+ // `result` receives the driver-side status from the 4-arg
+ // native_set_parms overload — assumed written on every call;
+ // TODO(review): confirm it cannot be read uninitialized on failure.
+ int result;
+ int saturation = params.getInt(QCameraParameters::KEY_SATURATION);
+
+ if((saturation < CAMERA_MIN_SATURATION)
+ || (saturation > CAMERA_MAX_SATURATION))
+ return UNKNOWN_ERROR;
+
+ ALOGV("Setting saturation %d", saturation);
+ mParameters.set(QCameraParameters::KEY_SATURATION, saturation);
+ ret = native_set_parms(MM_CAMERA_PARM_SATURATION, sizeof(saturation),
+ (void *)&saturation, (int *)&result);
+ if(result != MM_CAMERA_OK)
+ ALOGV("Saturation Value: %d is not set as the selected value is not supported", saturation);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+// Apply KEY_CONTRAST — but only when no bestshot (scene) mode is
+// active, since scene modes own the contrast setting in the driver.
+// May trigger a preview restart when the sensor reports
+// MM_CAMERA_PARM_BESTSHOT_RECONFIGURE and the value changed.
+status_t QCameraHardwareInterface::setContrast(const QCameraParameters& params)
+{
+ ALOGV("%s E", __func__ );
+ int rc = MM_CAMERA_OK;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_CONTRAST);
+ if(!rc) {
+ ALOGV("%s:CONTRAST not supported", __func__);
+ return NO_ERROR;
+ }
+ // NOTE(review): `str` is the SCENE MODE string, not the contrast
+ // value — the "Contrast : %s" log below is misleading.
+ const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+ ALOGV("Contrast : %s",str);
+ int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+ if(value == CAMERA_BESTSHOT_OFF) {
+ int contrast = params.getInt(QCameraParameters::KEY_CONTRAST);
+ if((contrast < CAMERA_MIN_CONTRAST)
+ || (contrast > CAMERA_MAX_CONTRAST))
+ {
+ ALOGV("Contrast Value not matching");
+ return UNKNOWN_ERROR;
+ }
+ ALOGV("setting contrast %d", contrast);
+ mParameters.set(QCameraParameters::KEY_CONTRAST, contrast);
+ ALOGV("Calling Contrast set on Lower layer");
+ bool ret = native_set_parms(MM_CAMERA_PARM_CONTRAST, sizeof(contrast),
+ (void *)&contrast);
+ ALOGV("Lower layer returned %d", ret);
+ // Some sensors require a pipeline reconfigure for the new value to
+ // take effect; restart preview (ZSL-aware) when they say so.
+ int bestshot_reconfigure;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+ &bestshot_reconfigure);
+ if(bestshot_reconfigure) {
+ if (mContrast != contrast) {
+ mContrast = contrast;
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+ mRestartPreview = 1;
+ pausePreviewForZSL();
+ }
+ }
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ } else {
+ ALOGV(" Contrast value will not be set " \
+ "when the scenemode selected is %s", str);
+ return NO_ERROR;
+ }
+ // Unreachable: both branches above return.
+ return BAD_VALUE;
+}
+
+// Apply KEY_SCENE_DETECT: toggle automatic scene detection (ASD) in
+// the lower layer via MM_CAMERA_PARM_ASD_ENABLE. Sensors without ASD
+// support are treated as success (no-op).
+status_t QCameraHardwareInterface::setSceneDetect(const QCameraParameters& params)
+{
+ ALOGV("%s",__func__);
+ bool retParm;
+ int rc = MM_CAMERA_OK;
+
+ rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_ASD_ENABLE);
+ if(!rc) {
+ ALOGV("%s:MM_CAMERA_PARM_ASD_ENABLE not supported", __func__);
+ return NO_ERROR;
+ }
+
+ const char *str = params.get(QCameraParameters::KEY_SCENE_DETECT);
+ ALOGV("Scene Detect string : %s",str);
+ if (str != NULL) {
+ int32_t value = attr_lookup(scenedetect, sizeof(scenedetect) / sizeof(str_map), str);
+ ALOGV("Scenedetect Value : %d",value);
+ if (value != NOT_FOUND) {
+ mParameters.set(QCameraParameters::KEY_SCENE_DETECT, str);
+
+ retParm = native_set_parms(MM_CAMERA_PARM_ASD_ENABLE, sizeof(value),
+ (void *)&value);
+
+ return retParm ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ // Missing key or unrecognized value.
+ return BAD_VALUE;
+}
+
+/* Apply the "zoom" parameter. The HAL exposes a fixed set of zoom
+   levels; each level maps linearly (ZOOM_STEP) onto a driver zoom
+   value so that every level always means the same ratio regardless of
+   preview size. Levels outside [0, mMaxZoom-1] yield BAD_VALUE;
+   sensors without zoom support are treated as success (no-op). */
+status_t QCameraHardwareInterface::setZoom(const QCameraParameters& params)
+{
+    ALOGV("%s: E",__func__);
+
+    if (!cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ZOOM)) {
+        ALOGV("%s:MM_CAMERA_PARM_ZOOM not supported", __func__);
+        return NO_ERROR;
+    }
+
+    // Fixed step: the maximum zoom value is a constant rather than read
+    // from the driver, so the per-level ratio never varies.
+    static const int ZOOM_STEP = 1;
+    status_t status = BAD_VALUE;
+    int32_t level = params.getInt("zoom");
+    if (level >= 0 && level < mMaxZoom) {
+        mParameters.set("zoom", level);
+        int32_t driver_zoom = ZOOM_STEP * level;
+        bool pushed = native_set_parms(MM_CAMERA_PARM_ZOOM,
+                                       sizeof(driver_zoom), (void *)&driver_zoom);
+        if (pushed) {
+            mCurrentZoom = level;
+        }
+        status = pushed ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    ALOGV("%s X",__func__);
+    return status;
+}
+
+// Apply KEY_ISO_MODE. The special CAMERA_ISO_DEBLUR value doubles as a
+// hand-jitter-reduction (HJR) switch: entering it enables
+// MM_CAMERA_PARM_HJR, and leaving it disables HJR again (tracked in
+// mHJR). Sensors without ISO support are treated as success (no-op).
+status_t QCameraHardwareInterface::setISOValue(const QCameraParameters& params) {
+
+ status_t rc = NO_ERROR;
+ ALOGV("%s",__func__);
+
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ISO);
+ if(!rc) {
+ ALOGV("%s:MM_CAMERA_PARM_ISO not supported", __func__);
+ return NO_ERROR;
+ }
+ const char *str = params.get(QCameraParameters::KEY_ISO_MODE);
+ ALOGV("ISO string : %s", str);
+ int8_t temp_hjr;
+ if (str != NULL) {
+ int value = (camera_iso_mode_type)attr_lookup(
+ iso, sizeof(iso) / sizeof(str_map), str);
+ ALOGV("ISO string : %s", str);
+ if (value != NOT_FOUND) {
+ camera_iso_mode_type temp = (camera_iso_mode_type) value;
+ if (value == CAMERA_ISO_DEBLUR) {
+ // Entering deblur ISO: turn HJR on.
+ temp_hjr = true;
+ native_set_parms(MM_CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+ mHJR = value;
+ }
+ else {
+ // Leaving deblur ISO: turn HJR back off.
+ if (mHJR == CAMERA_ISO_DEBLUR) {
+ temp_hjr = false;
+ native_set_parms(MM_CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+ mHJR = value;
+ }
+ }
+
+ mParameters.set(QCameraParameters::KEY_ISO_MODE, str);
+ native_set_parms(MM_CAMERA_PARM_ISO, sizeof(camera_iso_mode_type), (void *)&temp);
+ mIsoValue = (int)temp;
+ return NO_ERROR;
+ }
+ }
+ // Missing key or unrecognized value.
+ return BAD_VALUE;
+}
+
+// Query the driver for the near/optimal/far focus distances and cache
+// them in mFocusDistance as a comma-separated string for
+// KEY_FOCUS_DISTANCES. In infinity focus mode all three are reported
+// as the literal "Infinity" per the Android Camera API convention.
+status_t QCameraHardwareInterface::updateFocusDistances()
+{
+ ALOGV("%s: IN", __FUNCTION__);
+ focus_distances_info_t focusDistances;
+ if(cam_config_get_parm(mCameraId, MM_CAMERA_PARM_FOCUS_DISTANCES,
+ &focusDistances) == MM_CAMERA_OK) {
+ String8 str;
+ char buffer[32] = {0};
+ //set all distances to infinity if focus mode is infinity
+ if(mFocusMode == AF_MODE_INFINITY) {
+ snprintf(buffer, sizeof(buffer), "Infinity,");
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), "Infinity,");
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), "Infinity");
+ str.append(buffer);
+ } else {
+ snprintf(buffer, sizeof(buffer), "%f", focusDistances.focus_distance[0]);
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[1]);
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[2]);
+ str.append(buffer);
+ }
+ ALOGV("%s: setting KEY_FOCUS_DISTANCES as %s", __FUNCTION__, str.string());
+ mFocusDistance = str;
+ return NO_ERROR;
+ }
+ ALOGE("%s: get CAMERA_PARM_FOCUS_DISTANCES failed!!!", __FUNCTION__);
+ return BAD_VALUE;
+}
+
+// Parse string like "(1, 2, 3, 4, ..., N)"
+// num is pointer to an allocated array of size N
+// Returns 0 on success, -1 on any formatting error.
+static int parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+    char *start, *end;
+    if(num == NULL) {
+        ALOGE("Invalid output array (num == NULL)");
+        return -1;
+    }
+    // Bug fix: reject NULL/empty input before dereferencing; the
+    // original read str[0] and str[strlen(str)-1] unconditionally,
+    // crashing on NULL and reading out of bounds on "".
+    if(str == NULL || str[0] == '\0') {
+        ALOGE("Invalid input string");
+        return -1;
+    }
+    //check if string starts and ends with parentheses
+    if(str[0] != '(' || str[strlen(str)-1] != ')') {
+        ALOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)", str);
+        return -1;
+    }
+    start = (char*) str;
+    start++; // skip the opening '('
+    for(int i=0; i<N; i++) {
+        // strtol skips leading whitespace, so ", 2" parses cleanly.
+        *(num+i) = (int) strtol(start, &end, 10);
+        if(*end != delim && i < N-1) {
+            ALOGE("Cannot find delimeter '%c' in string \"%s\". end = %c", delim, str, *end);
+            return -1;
+        }
+        start = end+1;
+    }
+    return 0;
+}
+
+// parse string like "(1, 2, 3, 4, 5),(1, 2, 3, 4, 5),..."
+// Fills pAreas (capacity max_num_areas) and reports the count through
+// num_areas_found. Returns 0 on success, -1 on any formatting error.
+static int parseCameraAreaString(const char* str, int max_num_areas,
+                                 camera_area_t *pAreas, int *num_areas_found)
+{
+    // Bug fix: the buffer was 32 bytes, but a maximal tuple such as
+    // "(-1000, -1000, 1000, 1000, 1000)" is exactly 32 characters and
+    // needs 33 bytes with the terminator — a stack buffer overflow.
+    // Size generously and bound-check the copy explicitly.
+    char area_str[64];
+    const char *start, *end, *p;
+    start = str; end = NULL;
+    int values[5], index=0;
+    *num_areas_found = 0;
+
+    while(start != NULL) {
+        if(*start != '(') {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return -1;
+        }
+        end = strchr(start, ')');
+        if(end == NULL) {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return -1;
+        }
+        // Reject tuples that would not fit the scratch buffer.
+        if((end - start + 1) >= (long)sizeof(area_str)) {
+            ALOGE("%s: error: Ill formatted area string: %s", __func__, str);
+            return -1;
+        }
+        int i;
+        for (i=0,p=start; p<=end; p++, i++) {
+            area_str[i] = *p;
+        }
+        area_str[i] = '\0';
+        if(parseNDimVector(area_str, values, 5) < 0){
+            ALOGE("%s: error: Failed to parse the area string: %s", __func__, area_str);
+            return -1;
+        }
+        // no more areas than max_num_areas are accepted.
+        if(index >= max_num_areas) {
+            ALOGE("%s: error: too many areas specified %s", __func__, str);
+            return -1;
+        }
+        pAreas[index].x1 = values[0];
+        pAreas[index].y1 = values[1];
+        pAreas[index].x2 = values[2];
+        pAreas[index].y2 = values[3];
+        pAreas[index].weight = values[4];
+
+        index++;
+        start = strchr(end, '('); // search for next '('
+    }
+    (*num_areas_found) = index;
+    return 0;
+}
+// An area is valid when it lies inside the [-1000, 1000] viewfinder
+// space with left < right, top < bottom, and weight in [1, 1000]. The
+// all-zero tuple (0,0,0,0,0) is the "no area" sentinel and is always
+// accepted. Returns false as soon as any area fails.
+static bool validateCameraAreas(camera_area_t *areas, int num_areas)
+{
+    for (int i = 0; i < num_areas; i++) {
+        // handle special case (0, 0, 0, 0, 0)
+        bool sentinel = (areas[i].x1 == 0) && (areas[i].y1 == 0)
+                        && (areas[i].x2 == 0) && (areas[i].y2 == 0)
+                        && (areas[i].weight == 0);
+        if (sentinel) {
+            continue;
+        }
+        bool inBounds = (areas[i].x1 >= -1000) && (areas[i].y1 >= -1000)
+                        && (areas[i].x2 <= 1000) && (areas[i].y2 <= 1000);
+        bool ordered  = (areas[i].x1 < areas[i].x2) && (areas[i].y1 < areas[i].y2);
+        bool weightOk = (areas[i].weight > 0) && (areas[i].weight <= 1000);
+        if (!(inBounds && ordered && weightOk)) {
+            return false;
+        }
+    }
+    return true;
+}
+
+/** Parse and apply KEY_FOCUS_AREAS. Areas arrive as
+    "(x1,y1,x2,y2,weight)" tuples in the [-1000,1000] viewfinder space;
+    they are validated, stored into mParameters, mapped onto the
+    preview frame, and sent down as an AF ROI.
+    Bug fix throughout: `areas` is allocated with new[], so every
+    release must use delete[] (the original used scalar delete — UB). */
+status_t QCameraHardwareInterface::setFocusAreas(const QCameraParameters& params)
+{
+    ALOGV("%s: E", __func__);
+    status_t rc;
+    int max_num_af_areas = mParameters.getInt(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS);
+    if(max_num_af_areas == 0) {
+        // Sensor advertises no AF areas; silently ignore the key.
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_FOCUS_AREAS);
+    if (str == NULL) {
+        ALOGE("%s: Parameter string is null", __func__);
+        rc = NO_ERROR;
+    } else {
+        camera_area_t *areas = new camera_area_t[max_num_af_areas];
+        int num_areas_found=0;
+        if(parseCameraAreaString(str, max_num_af_areas, areas, &num_areas_found) < 0) {
+            ALOGE("%s: Failed to parse the string: %s", __func__, str);
+            delete [] areas;
+            return BAD_VALUE;
+        }
+        for(int i=0; i<num_areas_found; i++) {
+            ALOGV("FocusArea[%d] = (%d, %d, %d, %d, %d)", i, (areas[i].x1), (areas[i].y1),
+                        (areas[i].x2), (areas[i].y2), (areas[i].weight));
+        }
+        if(validateCameraAreas(areas, num_areas_found) == false) {
+            ALOGE("%s: invalid areas specified : %s", __func__, str);
+            delete [] areas;
+            return BAD_VALUE;
+        }
+        mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, str);
+        num_areas_found = 1; //temp; need to change after the multi-roi is enabled
+
+        //if the native_set_parms is called when preview is not started, it
+        //crashes in lower layer, so return if preview is not started
+        if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED) {
+            delete [] areas;
+            return NO_ERROR;
+        }
+
+        //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+        //so no action is taken by the lower layer
+        if(num_areas_found == 1 && (areas[0].x1 == 0) && (areas[0].y1 == 0)
+                && (areas[0].x2 == 0) && (areas[0].y2 == 0) && (areas[0].weight == 0)) {
+            num_areas_found = 0;
+        }
+#if 1 //temp solution
+
+        roi_info_t af_roi_value;
+        memset(&af_roi_value, 0, sizeof(roi_info_t));
+        uint16_t x1, x2, y1, y2, dx, dy;
+        int previewWidth, previewHeight;
+        this->getPreviewSize(&previewWidth, &previewHeight);
+        //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+        x1 = (uint16_t)((areas[0].x1 + 1000.0f)*(previewWidth/2000.0f));
+        y1 = (uint16_t)((areas[0].y1 + 1000.0f)*(previewHeight/2000.0f));
+        x2 = (uint16_t)((areas[0].x2 + 1000.0f)*(previewWidth/2000.0f));
+        y2 = (uint16_t)((areas[0].y2 + 1000.0f)*(previewHeight/2000.0f));
+        dx = x2 - x1;
+        dy = y2 - y1;
+
+        af_roi_value.num_roi = num_areas_found;
+        af_roi_value.roi[0].x = x1;
+        af_roi_value.roi[0].y = y1;
+        af_roi_value.roi[0].dx = dx;
+        af_roi_value.roi[0].dy = dy;
+        af_roi_value.is_multiwindow = 0;
+        if (native_set_parms(MM_CAMERA_PARM_AF_ROI, sizeof(roi_info_t), (void*)&af_roi_value))
+            rc = NO_ERROR;
+        else
+            rc = BAD_VALUE;
+        delete [] areas;
+#endif
+#if 0 //better solution with multi-roi, to be enabled later
+        af_mtr_area_t afArea;
+        afArea.num_area = num_areas_found;
+
+        uint16_t x1, x2, y1, y2, dx, dy;
+        int previewWidth, previewHeight;
+        this->getPreviewSize(&previewWidth, &previewHeight);
+
+        for(int i=0; i<num_areas_found; i++) {
+            //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+            x1 = (uint16_t)((areas[i].x1 + 1000.0f)*(previewWidth/2000.0f));
+            y1 = (uint16_t)((areas[i].y1 + 1000.0f)*(previewHeight/2000.0f));
+            x2 = (uint16_t)((areas[i].x2 + 1000.0f)*(previewWidth/2000.0f));
+            y2 = (uint16_t)((areas[i].y2 + 1000.0f)*(previewHeight/2000.0f));
+            dx = x2 - x1;
+            dy = y2 - y1;
+            afArea.mtr_area[i].x = x1;
+            afArea.mtr_area[i].y = y1;
+            afArea.mtr_area[i].dx = dx;
+            afArea.mtr_area[i].dy = dy;
+            afArea.weight[i] = areas[i].weight;
+        }
+
+        if(native_set_parms(MM_CAMERA_PARM_AF_MTR_AREA, sizeof(af_mtr_area_t), (void*)&afArea))
+            rc = NO_ERROR;
+        else
+            rc = BAD_VALUE;  // removed a stray `*/` left in this dead branch
+        delete [] areas;
+#endif
+    }
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+/** Parse and apply KEY_METERING_AREAS. Mirrors setFocusAreas():
+    validate the "(x1,y1,x2,y2,weight)" tuples, store them back into
+    mParameters, then send the center of the (single, temporary) area
+    down as the AEC ROI coordinate.
+    Bug fix throughout: `areas` is allocated with new[], so every
+    release must use delete[] (the original used scalar delete — UB). */
+status_t QCameraHardwareInterface::setMeteringAreas(const QCameraParameters& params)
+{
+    ALOGV("%s: E", __func__);
+    status_t rc;
+    int max_num_mtr_areas = mParameters.getInt(QCameraParameters::KEY_MAX_NUM_METERING_AREAS);
+    if(max_num_mtr_areas == 0) {
+        // Sensor advertises no metering areas; silently ignore the key.
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_METERING_AREAS);
+    if (str == NULL) {
+        ALOGE("%s: Parameter string is null", __func__);
+        rc = NO_ERROR;
+    } else {
+        camera_area_t *areas = new camera_area_t[max_num_mtr_areas];
+        int num_areas_found=0;
+        if(parseCameraAreaString(str, max_num_mtr_areas, areas, &num_areas_found) < 0) {
+            ALOGE("%s: Failed to parse the string: %s", __func__, str);
+            delete [] areas;
+            return BAD_VALUE;
+        }
+        for(int i=0; i<num_areas_found; i++) {
+            ALOGV("MeteringArea[%d] = (%d, %d, %d, %d, %d)", i, (areas[i].x1), (areas[i].y1),
+                        (areas[i].x2), (areas[i].y2), (areas[i].weight));
+        }
+        if(validateCameraAreas(areas, num_areas_found) == false) {
+            ALOGE("%s: invalid areas specified : %s", __func__, str);
+            delete [] areas;
+            return BAD_VALUE;
+        }
+        mParameters.set(QCameraParameters::KEY_METERING_AREAS, str);
+
+        //if the native_set_parms is called when preview is not started, it
+        //crashes in lower layer, so return if preview is not started
+        if(mPreviewState == QCAMERA_HAL_PREVIEW_STOPPED) {
+            delete [] areas;
+            return NO_ERROR;
+        }
+
+        num_areas_found = 1; //temp; need to change after the multi-roi is enabled
+
+        //for special area string (0, 0, 0, 0, 0), set the num_areas_found to 0,
+        //so no action is taken by the lower layer
+        if(num_areas_found == 1 && (areas[0].x1 == 0) && (areas[0].y1 == 0)
+                && (areas[0].x2 == 0) && (areas[0].y2 == 0) && (areas[0].weight == 0)) {
+            num_areas_found = 0;
+        }
+#if 1
+        cam_set_aec_roi_t aec_roi_value;
+        uint16_t x1, x2, y1, y2;
+        int previewWidth, previewHeight;
+        this->getPreviewSize(&previewWidth, &previewHeight);
+        //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+        x1 = (uint16_t)((areas[0].x1 + 1000.0f)*(previewWidth/2000.0f));
+        y1 = (uint16_t)((areas[0].y1 + 1000.0f)*(previewHeight/2000.0f));
+        x2 = (uint16_t)((areas[0].x2 + 1000.0f)*(previewWidth/2000.0f));
+        y2 = (uint16_t)((areas[0].y2 + 1000.0f)*(previewHeight/2000.0f));
+        delete [] areas;
+
+        if(num_areas_found == 1) {
+            // AEC follows the center point of the single metering area.
+            aec_roi_value.aec_roi_enable = AEC_ROI_ON;
+            aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+            aec_roi_value.aec_roi_position.coordinate.x = (x1+x2)/2;
+            aec_roi_value.aec_roi_position.coordinate.y = (y1+y2)/2;
+        } else {
+            aec_roi_value.aec_roi_enable = AEC_ROI_OFF;
+            aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+            aec_roi_value.aec_roi_position.coordinate.x = DONT_CARE_COORDINATE;
+            aec_roi_value.aec_roi_position.coordinate.y = DONT_CARE_COORDINATE;
+        }
+
+        if(native_set_parms(MM_CAMERA_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)&aec_roi_value))
+            rc = NO_ERROR;
+        else
+            rc = BAD_VALUE;
+#endif
+#if 0 //solution including multi-roi, to be enabled later
+        aec_mtr_area_t aecArea;
+        aecArea.num_area = num_areas_found;
+
+        uint16_t x1, x2, y1, y2, dx, dy;
+        int previewWidth, previewHeight;
+        this->getPreviewSize(&previewWidth, &previewHeight);
+
+        for(int i=0; i<num_areas_found; i++) {
+            //transform the coords from (-1000, 1000) to (0, previewWidth or previewHeight)
+            x1 = (uint16_t)((areas[i].x1 + 1000.0f)*(previewWidth/2000.0f));
+            y1 = (uint16_t)((areas[i].y1 + 1000.0f)*(previewHeight/2000.0f));
+            x2 = (uint16_t)((areas[i].x2 + 1000.0f)*(previewWidth/2000.0f));
+            y2 = (uint16_t)((areas[i].y2 + 1000.0f)*(previewHeight/2000.0f));
+            dx = x2 - x1;
+            dy = y2 - y1;
+            aecArea.mtr_area[i].x = x1;
+            aecArea.mtr_area[i].y = y1;
+            aecArea.mtr_area[i].dx = dx;
+            aecArea.mtr_area[i].dy = dy;
+            aecArea.weight[i] = areas[i].weight;
+        }
+        delete [] areas;
+
+        if(native_set_parms(MM_CAMERA_PARM_AEC_MTR_AREA, sizeof(aec_mtr_area_t), (void*)&aecArea))
+            rc = NO_ERROR;
+        else
+            rc = BAD_VALUE;
+#endif
+    }
+    ALOGV("%s: X", __func__);
+    return rc;
+}
+
+// Apply KEY_FOCUS_MODE. Also refreshes the focus-distance string, maps
+// the continuous modes onto driver CAF types (1 = video, 2 = picture),
+// cancels an in-flight auto-focus before enabling CAF, and finally
+// toggles MM_CAMERA_PARM_CONTINUOUS_AF.
+status_t QCameraHardwareInterface::setFocusMode(const QCameraParameters& params)
+{
+ const char *str = params.get(QCameraParameters::KEY_FOCUS_MODE);
+ // NOTE(review): prev_str is fetched but never used.
+ const char *prev_str = mParameters.get(QCameraParameters::KEY_FOCUS_MODE);
+ ALOGV("%s",__func__);
+ if (str != NULL) {
+ ALOGV("Focus mode %s",str);
+ int32_t value = attr_lookup(focus_modes,
+ sizeof(focus_modes) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(QCameraParameters::KEY_FOCUS_MODE, str);
+ mFocusMode = value;
+
+ // Focus distances depend on the mode (infinity reports "Infinity").
+ if(updateFocusDistances() != NO_ERROR) {
+ ALOGE("%s: updateFocusDistances failed for %s", __FUNCTION__, str);
+ return UNKNOWN_ERROR;
+ }
+ mParameters.set(QCameraParameters::KEY_FOCUS_DISTANCES, mFocusDistance.string());
+ if(mHasAutoFocusSupport){
+ bool ret = native_set_parms(MM_CAMERA_PARM_FOCUS_MODE,
+ sizeof(value),
+ (void *)&value);
+
+ int cafSupport = false;
+ int caf_type=0;
+ // NOTE(review): str_hdr is fetched but never used.
+ const char *str_hdr = mParameters.get(QCameraParameters::KEY_SCENE_MODE);
+ if(!strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) ||
+ !strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)){
+ cafSupport = true;
+ bool rc = false;
+ // Driver CAF type: 1 = continuous-video, 2 = continuous-picture.
+ if(!strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO))
+ {
+ caf_type = 1;
+ rc = native_set_parms(MM_CAMERA_PARM_CAF_TYPE, sizeof(caf_type), (void *)&caf_type);
+ }
+ else if(!strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE))
+ {
+ caf_type = 2;
+ rc = native_set_parms(MM_CAMERA_PARM_CAF_TYPE, sizeof(caf_type), (void *)&caf_type);
+ }
+ ALOGV("caf_type %d rc %d", caf_type, rc);
+ }
+ ALOGV("Continuous Auto Focus %d", cafSupport);
+ // Cancel any in-flight one-shot AF before switching to CAF.
+ if(mAutoFocusRunning && cafSupport){
+ mAutoFocusRunning = false;
+ if(MM_CAMERA_OK!=cam_ops_action(mCameraId,false,MM_CAMERA_OPS_FOCUS,NULL )) {
+ ALOGE("%s: AF command failed err:%d error %s",__func__, errno,strerror(errno));
+ }
+ }
+ ret = native_set_parms(MM_CAMERA_PARM_CONTINUOUS_AF, sizeof(cafSupport),
+ (void *)&cafSupport);
+ }
+
+ return NO_ERROR;
+ }
+ ALOGV("%s:Could not look up str value",__func__);
+ }
+ ALOGE("Invalid focus mode value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Apply KEY_SCENE_MODE (bestshot). Transitions to or from "hdr", and
+// any value change on sensors reporting BESTSHOT_RECONFIGURE, restart
+// the preview (ZSL-aware) so the pipeline is reconfigured.
+status_t QCameraHardwareInterface::setSceneMode(const QCameraParameters& params)
+{
+ status_t rc = NO_ERROR;
+ ALOGV("%s",__func__);
+
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_BESTSHOT_MODE);
+ if(!rc) {
+ ALOGV("%s:Parameter Scenemode is not supported for this sensor", __func__);
+ return NO_ERROR;
+ }
+ const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+ const char *oldstr = mParameters.get(QCameraParameters::KEY_SCENE_MODE);
+
+ // Note: a NULL oldstr (key never set) falls through to BAD_VALUE even
+ // when the incoming value is valid.
+ if (str != NULL && oldstr != NULL) {
+ int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ /* Check to see if there was a change of scene mode */
+ if(strcmp(str,oldstr)) {
+ ALOGV("%s: valued changed from %s to %s",__func__,oldstr, str);
+
+ /* Check if we are either transitioning to/from HDR state
+ if yes preview needs restart*/
+ if(!strcmp(str, "hdr") || !strcmp(oldstr, "hdr") ) {
+ ALOGV("Changed between HDR/non-HDR states");
+
+ /* Restart only if preview already running*/
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED) {
+ ALOGV("Preview in progress,restarting for HDR transition");
+ mParameters.set(QCameraParameters::KEY_SCENE_MODE, str);
+ mRestartPreview = 1;
+ pausePreviewForZSL();
+ }
+ }
+ }
+
+ mParameters.set(QCameraParameters::KEY_SCENE_MODE, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_BESTSHOT_MODE, sizeof(value),
+ (void *)&value);
+ // Some sensors need a full reconfigure for the new bestshot mode.
+ int bestshot_reconfigure;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+ &bestshot_reconfigure);
+ if(bestshot_reconfigure) {
+ if (mBestShotMode != value) {
+ mBestShotMode = value;
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+ ALOGV("%s:Bestshot trigerring restart",__func__);
+ mRestartPreview = 1;
+ pausePreviewForZSL();
+ }
+ }
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ ALOGE("Invalid scenemode value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Apply KEY_SELECTABLE_ZONE_AF via MM_CAMERA_PARM_FOCUS_RECT. A no-op
+// (NO_ERROR) when the sensor lacks AF support or the focus-rect parm.
+status_t QCameraHardwareInterface::setSelectableZoneAf(const QCameraParameters& params)
+{
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ if(mHasAutoFocusSupport) {
+ const char *str = params.get(QCameraParameters::KEY_SELECTABLE_ZONE_AF);
+ if (str != NULL) {
+ int32_t value = attr_lookup(selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FOCUS_RECT);
+ if(!rc) {
+ ALOGV("SelectableZoneAF is not supported for this sensor");
+ return NO_ERROR;
+ }else {
+ mParameters.set(QCameraParameters::KEY_SELECTABLE_ZONE_AF, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_FOCUS_RECT, sizeof(value),
+ (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ }
+ ALOGE("Invalid selectable zone af value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+
+ }
+ return NO_ERROR;
+}
+
+// Apply KEY_EFFECT via the effects lookup table. On sensors reporting
+// BESTSHOT_RECONFIGURE, a changed effect restarts the preview
+// (ZSL-aware) so the pipeline picks up the new setting.
+status_t QCameraHardwareInterface::setEffect(const QCameraParameters& params)
+{
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ const char *str = params.get(QCameraParameters::KEY_EFFECT);
+ int result;
+ if (str != NULL) {
+ ALOGV("Setting effect %s",str);
+ int32_t value = attr_lookup(effects, sizeof(effects) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EFFECT);
+ if(!rc) {
+ ALOGV("Camera Effect - %s mode is not supported for this sensor",str);
+ return NO_ERROR;
+ }else {
+ mParameters.set(QCameraParameters::KEY_EFFECT, str);
+ // `result` carries the driver-side status (4-arg overload).
+ bool ret = native_set_parms(MM_CAMERA_PARM_EFFECT, sizeof(value),
+ (void *)&value,(int *)&result);
+ if(result != MM_CAMERA_OK) {
+ ALOGE("Camera Effect: %s is not set as the selected value is not supported ", str);
+ }
+ int bestshot_reconfigure;
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+ &bestshot_reconfigure);
+ if(bestshot_reconfigure) {
+ if (mEffects != value) {
+ mEffects = value;
+ if (mPreviewState == QCAMERA_HAL_PREVIEW_STARTED && ret) {
+ mRestartPreview = 1;
+ pausePreviewForZSL();
+ }
+ }
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ }
+ ALOGE("Invalid effect value: %s", (str == NULL) ? "NULL" : str);
+ ALOGV("setEffect X");
+ return BAD_VALUE;
+}
+
+/* Push the "luma-adaptation" (brightness) value to the driver, but
+   only when it differs from the cached mBrightness. Sensors without
+   brightness support are treated as success (no-op). */
+status_t QCameraHardwareInterface::setBrightness(const QCameraParameters& params) {
+
+    ALOGV("%s",__func__);
+    if (!cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_BRIGHTNESS)) {
+        ALOGV("MM_CAMERA_PARM_BRIGHTNESS mode is not supported for this sensor");
+        return NO_ERROR;
+    }
+    int requested = params.getInt("luma-adaptation");
+    if (requested == mBrightness) {
+        // Unchanged — nothing to push down.
+        return NO_ERROR;
+    }
+    ALOGV(" new brightness value : %d ", requested);
+    mBrightness = requested;
+    mParameters.set("luma-adaptation", requested);
+    bool pushed = native_set_parms(MM_CAMERA_PARM_BRIGHTNESS, sizeof(mBrightness),
+                                   (void *)&mBrightness);
+    return pushed ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+/* Map KEY_AUTO_EXPOSURE through the autoexposure lookup table onto
+   MM_CAMERA_PARM_EXPOSURE and push it to the driver. Sensors without
+   exposure support are treated as success (no-op); a missing key or
+   unrecognized value yields BAD_VALUE. */
+status_t QCameraHardwareInterface::setAutoExposure(const QCameraParameters& params)
+{
+
+    ALOGV("%s",__func__);
+    if (!cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EXPOSURE)) {
+        ALOGV("MM_CAMERA_PARM_EXPOSURE mode is not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *mode = params.get(QCameraParameters::KEY_AUTO_EXPOSURE);
+    int32_t parmValue = NOT_FOUND;
+    if (mode != NULL) {
+        parmValue = attr_lookup(autoexposure,
+                                sizeof(autoexposure) / sizeof(str_map), mode);
+    }
+    if (mode == NULL || parmValue == NOT_FOUND) {
+        ALOGE("Invalid auto exposure value: %s", (mode == NULL) ? "NULL" : mode);
+        return BAD_VALUE;
+    }
+    mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE, mode);
+    bool pushed = native_set_parms(MM_CAMERA_PARM_EXPOSURE, sizeof(parmValue),
+                                   (void *)&parmValue);
+    return pushed ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+// Apply KEY_EXPOSURE_COMPENSATION. The driver takes a packed value:
+// signed numerator in the high 16 bits, fixed denominator in the low
+// 16 bits. In "sunset" scene mode the stored parameter is forced to -6.
+status_t QCameraHardwareInterface::setExposureCompensation(
+ const QCameraParameters & params){
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_EXPOSURE_COMPENSATION);
+ if(!rc) {
+ ALOGV("MM_CAMERA_PARM_EXPOSURE_COMPENSATION mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ int numerator = params.getInt(QCameraParameters::KEY_EXPOSURE_COMPENSATION);
+ if(EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR <= numerator &&
+ numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+ // Pack numerator/denominator into one 32-bit driver value.
+ int16_t numerator16 = (int16_t)(numerator & 0x0000ffff);
+ uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+ uint32_t value = 0;
+ value = numerator16 << 16 | denominator16;
+
+ const char *sce_str = params.get(QCameraParameters::KEY_SCENE_MODE);
+ if (sce_str != NULL) {
+ if(!strcmp(sce_str, "sunset")){
+ //Exposure comp value in sunset scene mode
+ // NOTE(review): only the stored parameter is overridden to -6;
+ // the driver still receives `value` built from the caller's
+ // numerator — confirm this asymmetry is intended.
+ mParameters.set(QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+ -6);
+ }else{
+ //Exposure comp value for other
+ mParameters.set(QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+ numerator);
+ }
+ }else {
+ mParameters.set(QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+ numerator);
+ }
+ bool ret = native_set_parms(MM_CAMERA_PARM_EXPOSURE_COMPENSATION,
+ sizeof(value), (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ ALOGE("Invalid Exposure Compensation");
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setWhiteBalance(const QCameraParameters& params)
+{
+
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ // 'result' receives the driver's per-call status from native_set_parms.
+ int result;
+ const char *str = NULL;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_WHITE_BALANCE);
+ if(!rc) {
+ ALOGV("MM_CAMERA_PARM_WHITE_BALANCE mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+
+ // Scene mode overrides the requested WB: "sunset" forces daylight, any other
+ // non-auto scene forces auto WB; only scene "auto" honors the app's WB choice.
+ const char *sce_str = params.get(QCameraParameters::KEY_SCENE_MODE);
+ if (sce_str != NULL) {
+ if(!strcmp(sce_str, "sunset")){
+ //AWB value in sunset scene mode
+ str = QCameraParameters::WHITE_BALANCE_DAYLIGHT;
+ mParameters.set(QCameraParameters::KEY_WHITE_BALANCE, str);
+ }else if(!strcmp(sce_str, "auto")){
+ str = params.get(QCameraParameters::KEY_WHITE_BALANCE);
+ }else{
+ //AWB in other scene Mode
+ str = QCameraParameters::WHITE_BALANCE_AUTO;
+ mParameters.set(QCameraParameters::KEY_WHITE_BALANCE, str);
+ }
+ }else {
+ str = params.get(QCameraParameters::KEY_WHITE_BALANCE);
+ }
+
+ if (str != NULL) {
+ int32_t value = attr_lookup(whitebalance, sizeof(whitebalance) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(QCameraParameters::KEY_WHITE_BALANCE, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_WHITE_BALANCE, sizeof(value),
+ (void *)&value, (int *)&result);
+ // A driver rejection is logged but still reported via 'ret' below.
+ if(result != MM_CAMERA_OK) {
+ ALOGE("WhiteBalance Value: %s is not set as the selected value is not supported ", str);
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ ALOGE("Invalid whitebalance value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setAntibanding(const QCameraParameters& params)
+{
+ // Driver-side status for the native_set_parms call below.
+ int result;
+
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ // Unsupported sensors silently succeed so setParameters() is not failed.
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ANTIBANDING);
+ if(!rc) {
+ ALOGV("ANTIBANDING mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ const char *str = params.get(QCameraParameters::KEY_ANTIBANDING);
+ if (str != NULL) {
+ int value = (camera_antibanding_type)attr_lookup(
+ antibanding, sizeof(antibanding) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ camera_antibanding_type temp = (camera_antibanding_type) value;
+ ALOGV("Antibanding Value : %d",value);
+ mParameters.set(QCameraParameters::KEY_ANTIBANDING, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_ANTIBANDING,
+ sizeof(camera_antibanding_type), (void *)&value ,(int *)&result);
+ // Rejection by the driver is logged only; 'ret' decides the return code.
+ if(result != MM_CAMERA_OK) {
+ ALOGE("AntiBanding Value: %s is not supported for the given BestShot Mode", str);
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ ALOGE("Invalid antibanding value: %s", (str == NULL) ? "NULL" : str);
+
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setPreviewFrameRate(const QCameraParameters& params)
+{
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS);
+ if(!rc) {
+ ALOGV("MM_CAMERA_PARM_FPS is not supported for this sensor");
+ return NO_ERROR;
+ }
+ uint16_t previousFps = (uint16_t)mParameters.getPreviewFrameRate();
+ uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+ ALOGV("requested preview frame rate is %u", fps);
+
+ // Skip the driver call when nothing changed after initialization.
+ if(mInitialized && (fps == previousFps)){
+ ALOGV("No change is FPS Value %d",fps );
+ return NO_ERROR;
+ }
+
+ // Only rates within [MINIMUM_FPS, MAXIMUM_FPS] are accepted.
+ if(MINIMUM_FPS <= fps && fps <=MAXIMUM_FPS){
+ mParameters.setPreviewFrameRate(fps);
+ bool ret = native_set_parms(MM_CAMERA_PARM_FPS,
+ sizeof(fps), (void *)&fps);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setPreviewFrameRateMode(const QCameraParameters& params) {
+
+    // Switch between auto/fixed preview frame-rate modes, then re-apply the
+    // FPS value, since changing modes requires the rate to be programmed again.
+    ALOGV("%s",__func__);
+    status_t rc = NO_ERROR;
+    // Both the FPS parameter and the FPS-mode parameter must be supported.
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS);
+    if(!rc) {
+        ALOGV(" CAMERA FPS mode is not supported for this sensor");
+        return NO_ERROR;
+    }
+    rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_FPS_MODE);
+    if(!rc) {
+        ALOGV("CAMERA FPS MODE mode is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *previousMode = mParameters.getPreviewFrameRateMode();
+    const char *str = params.getPreviewFrameRateMode();
+    if (NULL == previousMode) {
+        ALOGV("Preview Frame Rate Mode is NULL\n");
+        return NO_ERROR;
+    }
+    if (NULL == str) {
+        ALOGV("Preview Frame Rate Mode is NULL\n");
+        return NO_ERROR;
+    }
+    int32_t frameRateMode = attr_lookup(frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map),str);
+    if(frameRateMode != NOT_FOUND) {
+        ALOGV("setPreviewFrameRateMode: %s ", str);
+        mParameters.setPreviewFrameRateMode(str);
+        bool ret = native_set_parms(MM_CAMERA_PARM_FPS_MODE, sizeof(frameRateMode), (void *)&frameRateMode);
+        // BUG FIX: the old code did "if(!ret) return ret;", which converted the
+        // bool 'false' to status_t 0 (NO_ERROR), reporting success on a driver
+        // failure. Report the failure explicitly instead.
+        if(!ret) return UNKNOWN_ERROR;
+        // Set the fps value when changing modes.
+        // BUG FIX: fps was declared int16_t while being cast from uint16_t;
+        // use uint16_t to match setPreviewFrameRate() and avoid sign truncation.
+        uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+        if(MINIMUM_FPS <= fps && fps <= MAXIMUM_FPS){
+            mParameters.setPreviewFrameRate(fps);
+            ret = native_set_parms(MM_CAMERA_PARM_FPS,
+                                   sizeof(fps), (void *)&fps);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+        ALOGE("Invalid preview frame rate value: %d", fps);
+        return BAD_VALUE;
+    }
+    ALOGE("Invalid preview frame rate mode value: %s", (str == NULL) ? "NULL" : str);
+
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setSkinToneEnhancement(const QCameraParameters& params) {
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_SCE_FACTOR);
+ if(!rc) {
+ ALOGV("SkinToneEnhancement is not supported for this sensor");
+ return NO_ERROR;
+ }
+ int skinToneValue = params.getInt("skinToneEnhancement");
+ // Push to the driver only on change; mSkinToneEnhancement caches the last value.
+ if (mSkinToneEnhancement != skinToneValue) {
+ ALOGV(" new skinTone correction value : %d ", skinToneValue);
+ mSkinToneEnhancement = skinToneValue;
+ mParameters.set("skinToneEnhancement", skinToneValue);
+ bool ret = native_set_parms(MM_CAMERA_PARM_SCE_FACTOR, sizeof(mSkinToneEnhancement),
+ (void *)&mSkinToneEnhancement);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setWaveletDenoise(const QCameraParameters& params) {
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_WAVELET_DENOISE);
+ // NOTE: unlike the other setters, an unsupported sensor deliberately does NOT
+ // early-return here (pending TODO below); the set is still attempted.
+ if(rc != MM_CAMERA_PARM_SUPPORT_SET) {
+ ALOGV("Wavelet Denoise is not supported for this sensor");
+ /* TO DO */
+// return NO_ERROR;
+ }
+ const char *str = params.get(QCameraParameters::KEY_DENOISE);
+ if (str != NULL) {
+ int value = attr_lookup(denoise,
+ sizeof(denoise) / sizeof(str_map), str);
+ // Apply only on change; mDenoiseValue caches the last enable state.
+ if ((value != NOT_FOUND) && (mDenoiseValue != value)) {
+ mDenoiseValue = value;
+ mParameters.set(QCameraParameters::KEY_DENOISE, str);
+
+ // The process-plates selection comes from a persist property (default "1").
+ char prop[PROPERTY_VALUE_MAX];
+ memset(prop, 0, sizeof(prop));
+ property_get("persist.denoise.process.plates", prop, "1");
+
+ denoise_param_t temp;
+ memset(&temp, 0, sizeof(denoise_param_t));
+ temp.denoise_enable = value;
+ temp.process_plates = atoi(prop);
+ ALOGV("Denoise enable=%d, plates=%d", temp.denoise_enable, temp.process_plates);
+ bool ret = native_set_parms(MM_CAMERA_PARM_WAVELET_DENOISE, sizeof(temp),
+ (void *)&temp);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+ }
+ ALOGE("Invalid Denoise value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setVideoSize(const QCameraParameters& params)
+{
+ const char *str= NULL;
+ const char *str_t= NULL;
+ int old_vid_w = 0, old_vid_h = 0;
+ ALOGV("%s: E", __func__);
+ str = params.get(QCameraParameters::KEY_VIDEO_SIZE);
+ str_t = mParameters.get(CameraParameters::KEY_VIDEO_SIZE);
+ if(!str) {
+ mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, "");
+ //If application didn't set this parameter string, use the values from
+ //getPreviewSize() as video dimensions.
+ ALOGV("No Record Size requested, use the preview dimensions");
+ mVideoWidth = mPreviewWidth;
+ mVideoHeight = mPreviewHeight;
+ } else {
+ //Extract the record witdh and height that application requested.
+ ALOGV("%s: requested record size %s", __func__, str);
+ // parse_size() appears to return 0 on success (the '!' branch proceeds with
+ // the parsed values) — NOTE(review): confirm against its definition.
+ if(!parse_size(str, mVideoWidth, mVideoHeight)) {
+ parse_size(str_t, old_vid_w, old_vid_h);
+ // A change in video dimensions forces a preview restart.
+ if(old_vid_w != mVideoWidth || old_vid_h != mVideoHeight) {
+ mRestartPreview = true;
+ ALOGV("%s: Video sizes changes, Restart preview...", __func__);
+ }
+ mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, str);
+ } else {
+ mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, "");
+ ALOGE("%s: error :failed to parse parameter record-size (%s)", __func__, str);
+ return BAD_VALUE;
+ }
+ }
+ ALOGV("%s: preview dimensions: %dx%d", __func__, mPreviewWidth, mPreviewHeight);
+ ALOGV("%s: video dimensions: %dx%d", __func__, mVideoWidth, mVideoHeight);
+
+ ALOGV("%s: X", __func__);
+ return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setCameraMode(const QCameraParameters& params) {
+    // Mirror the requested camera mode into mParameters and toggle the
+    // ZSL bit of myMode accordingly (1 enables ZSL, anything else disables it).
+    int32_t zslEnabled = params.getInt(QCameraParameters::KEY_CAMERA_MODE);
+    mParameters.set(QCameraParameters::KEY_CAMERA_MODE, zslEnabled);
+
+    ALOGV("ZSL is enabled %d", zslEnabled);
+    myMode = (zslEnabled == 1)
+        ? (camera_mode_t)(myMode | CAMERA_ZSL_MODE)
+        : (camera_mode_t)(myMode & ~CAMERA_ZSL_MODE);
+
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setPowerMode(const QCameraParameters& params) {
+ uint32_t value = NORMAL_POWER;
+ const char *powermode = NULL;
+
+ powermode = params.get(QCameraParameters::KEY_POWER_MODE);
+ if (powermode != NULL) {
+ value = attr_lookup(power_modes,
+ sizeof(power_modes) / sizeof(str_map), powermode);
+ // HFR level above 1 forces low power regardless of the requested mode.
+ if((value == LOW_POWER) || mHFRLevel > 1) {
+ ALOGV("Enable Low Power Mode");
+ value = LOW_POWER;
+ mPowerMode = value;
+ mParameters.set(QCameraParameters::KEY_POWER_MODE,"Low_Power");
+ } else {
+ ALOGV("Enable Normal Power Mode");
+ mPowerMode = value;
+ mParameters.set(QCameraParameters::KEY_POWER_MODE,"Normal_Power");
+ }
+ }
+
+ ALOGV("%s Low power mode %s value = %d", __func__,
+ value ? "Enabled" : "Disabled", value);
+ // The driver is informed even when the key was absent (defaults to NORMAL_POWER).
+ native_set_parms(MM_CAMERA_PARM_LOW_POWER_MODE, sizeof(value),
+ (void *)&value);
+ return NO_ERROR;
+}
+
+
+status_t QCameraHardwareInterface::setPreviewSize(const QCameraParameters& params)
+{
+ int width, height;
+ params.getPreviewSize(&width, &height);
+ ALOGV("################requested preview size %d x %d", width, height);
+
+ // Validate the preview size against the supported-size table.
+ for (size_t i = 0; i < mPreviewSizeCount; ++i) {
+ if (width == mPreviewSizes[i].width
+ && height == mPreviewSizes[i].height) {
+ int old_width, old_height;
+ mParameters.getPreviewSize(&old_width,&old_height);
+ // A size change requires a preview restart.
+ if(width != old_width || height != old_height) {
+ mRestartPreview = true;
+ }
+ mParameters.setPreviewSize(width, height);
+ ALOGV("setPreviewSize: width: %d heigh: %d", width, height);
+ mPreviewWidth = width;
+ mPreviewHeight = height;
+
+ // Display and video dimensions track the preview dimensions here;
+ // setVideoSize() may override the video fields later.
+ mDimension.display_width = mPreviewWidth;
+ mDimension.display_height= mPreviewHeight;
+ mDimension.orig_video_width = mPreviewWidth;
+ mDimension.orig_video_height = mPreviewHeight;
+ mDimension.video_width = mPreviewWidth;
+ mDimension.video_height = mPreviewHeight;
+
+ return NO_ERROR;
+ }
+ }
+ ALOGE("Invalid preview size requested: %dx%d", width, height);
+ return BAD_VALUE;
+}
+status_t QCameraHardwareInterface::setPreviewFpsRange(const QCameraParameters& params)
+{
+ ALOGV("%s: E", __func__)
+ int minFps,maxFps;
+ int prevMinFps, prevMaxFps;
+ int rc = NO_ERROR;
+ bool found = false;
+
+ mParameters.getPreviewFpsRange(&prevMinFps, &prevMaxFps);
+ ALOGV("%s: Existing FpsRange Values:(%d, %d)", __func__, prevMinFps, prevMaxFps);
+ params.getPreviewFpsRange(&minFps,&maxFps);
+ ALOGV("%s: Requested FpsRange Values:(%d, %d)", __func__, minFps, maxFps);
+
+ // Skip the driver round-trip when the range is unchanged after init.
+ if(mInitialized && (minFps == prevMinFps && maxFps == prevMaxFps)) {
+ ALOGV("%s: No change in FpsRange", __func__);
+ rc = NO_ERROR;
+ goto end;
+ }
+ for(size_t i=0; i<FPS_RANGES_SUPPORTED_COUNT; i++) {
+ // if the value is in the supported list
+ if(minFps==FpsRangesSupported[i].minFPS && maxFps == FpsRangesSupported[i].maxFPS){
+ found = true;
+ ALOGV("FPS: i=%d : minFps = %d, maxFps = %d ",i,FpsRangesSupported[i].minFPS,FpsRangesSupported[i].maxFPS );
+ mParameters.setPreviewFpsRange(minFps,maxFps);
+ // validate the values
+ bool valid = true;
+ // FPS can not be negative
+ if(minFps < 0 || maxFps < 0) valid = false;
+ // maxFps must be >= minFps (reject an inverted range)
+ if(minFps > maxFps) valid = false;
+
+ if(valid) {
+ //Set the FPS mode: equal min/max means a fixed frame rate.
+ const char *str = (minFps == maxFps) ?
+ QCameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE:
+ QCameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE;
+ ALOGV("%s FPS_MODE = %s", __func__, str);
+ int32_t frameRateMode = attr_lookup(frame_rate_modes,
+ sizeof(frame_rate_modes) / sizeof(str_map),str);
+ bool ret;
+ ret = native_set_parms(MM_CAMERA_PARM_FPS_MODE, sizeof(int32_t),
+ (void *)&frameRateMode);
+
+ //set FPS values
+ uint32_t fps; //lower 2 bytes specify maxFps and higher 2 bytes specify minFps
+ fps = ((uint32_t)(minFps/1000) << 16) + ((uint16_t)(maxFps/1000));
+ ret = native_set_parms(MM_CAMERA_PARM_FPS, sizeof(uint32_t), (void *)&fps);
+ mParameters.setPreviewFpsRange(minFps, maxFps);
+ if(ret)
+ rc = NO_ERROR;
+ else {
+ rc = BAD_VALUE;
+ ALOGE("%s: error: native_set_params failed", __func__);
+ }
+ } else {
+ ALOGE("%s: error: invalid FPS range value", __func__);
+ rc = BAD_VALUE;
+ }
+ }
+ }
+ if(found == false){
+ ALOGE("%s: error: FPS range value not supported", __func__);
+ rc = BAD_VALUE;
+ }
+end:
+ ALOGV("%s: X", __func__);
+ return rc;
+}
+
+status_t QCameraHardwareInterface::setJpegThumbnailSize(const QCameraParameters& params){
+    // Accept only thumbnail dimensions present in the default size table.
+    int reqWidth = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+    int reqHeight = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+    ALOGV("requested jpeg thumbnail size %d x %d", reqWidth, reqHeight);
+
+    for (unsigned int idx = 0; idx < thumbnail_sizes_count; ++idx) {
+        bool match = (reqWidth == default_thumbnail_sizes[idx].width)
+            && (reqHeight == default_thumbnail_sizes[idx].height);
+        if (!match)
+            continue;
+        thumbnailWidth = reqWidth;
+        thumbnailHeight = reqHeight;
+        mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, reqWidth);
+        mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, reqHeight);
+        return NO_ERROR;
+    }
+    ALOGE("error: setting jpeg thumbnail size");
+    return BAD_VALUE;
+}
+status_t QCameraHardwareInterface::setPictureSize(const QCameraParameters& params)
+{
+ int width, height;
+ ALOGV("QualcommCameraHardware::setPictureSize E");
+ params.getPictureSize(&width, &height);
+ ALOGV("requested picture size %d x %d", width, height);
+
+ // Validate the picture size against the advertised list first.
+ for (int i = 0; i < mSupportedPictureSizesCount; ++i) {
+ if (width == mPictureSizesPtr[i].width
+ && height == mPictureSizesPtr[i].height) {
+ int old_width, old_height;
+ mParameters.getPictureSize(&old_width,&old_height);
+ // A size change requires a preview restart.
+ if(width != old_width || height != old_height) {
+ mRestartPreview = true;
+ }
+ mParameters.setPictureSize(width, height);
+ mDimension.picture_width = width;
+ mDimension.picture_height = height;
+ return NO_ERROR;
+ }
+ }
+ /* Dimension not among the ones in the list. Check if
+ * its a valid dimension, if it is, then configure the
+ * camera accordingly. else reject it.
+ */
+ if( isValidDimension(width, height) ) {
+ mParameters.setPictureSize(width, height);
+ mDimension.picture_width = width;
+ mDimension.picture_height = height;
+ return NO_ERROR;
+ } else
+ ALOGE("Invalid picture size requested: %dx%d", width, height);
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setJpegRotation(int isZsl) {
+ // Forward the cached rotation (mRotation) to the JPEG encoder; isZsl is
+ // passed straight through to the encoder.
+ return mm_jpeg_encoder_setRotation(mRotation, isZsl);
+}
+
+int QCameraHardwareInterface::getJpegRotation(void) {
+ // Accessor for the currently cached JPEG rotation value.
+ return mRotation;
+}
+
+int QCameraHardwareInterface::getISOSpeedValue()
+{
+    // Translate the current ISO mode string into its numeric speed value by
+    // looking up its index in the iso map and indexing iso_speed_values.
+    const char *isoMode = mParameters.get(QCameraParameters::KEY_ISO_MODE);
+    int isoIndex = attr_lookup(iso, sizeof(iso) / sizeof(str_map), isoMode);
+    return iso_speed_values[isoIndex];
+}
+
+
+status_t QCameraHardwareInterface::setJpegQuality(const QCameraParameters& params) {
+    // Validate and store the main-image and thumbnail JPEG quality (0..100).
+    // Either value being out of range yields BAD_VALUE, but a valid companion
+    // value is still applied.
+    status_t rc = NO_ERROR;
+    int quality = params.getInt(QCameraParameters::KEY_JPEG_QUALITY);
+    ALOGV("setJpegQuality E");
+    if (quality < 0 || quality > 100) {
+        ALOGE("Invalid jpeg quality=%d", quality);
+        rc = BAD_VALUE;
+    } else {
+        mParameters.set(QCameraParameters::KEY_JPEG_QUALITY, quality);
+        mJpegQuality = quality;
+    }
+
+    quality = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+    if (quality < 0 || quality > 100) {
+        ALOGE("Invalid jpeg thumbnail quality=%d", quality);
+        rc = BAD_VALUE;
+    } else {
+        mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, quality);
+    }
+    ALOGV("setJpegQuality X");
+    return rc;
+}
+
+status_t QCameraHardwareInterface::
+setNumOfSnapshot(const QCameraParameters& params) {
+    // Program the snapshot burst length, defaulting to a single shot when the
+    // requested count is non-positive. A driver failure is logged but not
+    // propagated (always returns NO_ERROR).
+    status_t rc = NO_ERROR;
+
+    int num_of_snapshot = getNumOfSnapshots(params);
+    if (num_of_snapshot <= 0)
+        num_of_snapshot = 1;
+
+    ALOGV("number of snapshots = %d", num_of_snapshot);
+    mParameters.set("num-snaps-per-shutter", num_of_snapshot);
+
+    if (!native_set_parms(MM_CAMERA_PARM_SNAPSHOT_BURST_NUM,
+                          sizeof(int),
+                          (void *)&num_of_snapshot))
+        ALOGE("%s:Failure setting number of snapshots!!!", __func__);
+    return rc;
+}
+
+status_t QCameraHardwareInterface::setPreviewFormat(const QCameraParameters& params) {
+ const char *str = params.getPreviewFormat();
+ int32_t previewFormat = attr_lookup(preview_formats, sizeof(preview_formats) / sizeof(str_map), str);
+ if(previewFormat != NOT_FOUND) {
+ int num = sizeof(preview_format_info_list)/sizeof(preview_format_info_t);
+ int i;
+
+ // Find the full format descriptor matching the HAL format value.
+ for (i = 0; i < num; i++) {
+ if (preview_format_info_list[i].Hal_format == previewFormat) {
+ mPreviewFormatInfo = preview_format_info_list[i];
+ break;
+ }
+ }
+
+ // Known HAL format but no descriptor: fall back to NV21 and fail.
+ if (i == num) {
+ mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+ mPreviewFormatInfo.padding = CAMERA_PAD_TO_WORD;
+ return BAD_VALUE;
+ }
+ bool ret = native_set_parms(MM_CAMERA_PARM_PREVIEW_FORMAT, sizeof(cam_format_t),
+ (void *)&mPreviewFormatInfo.mm_cam_format);
+ mParameters.set(QCameraParameters::KEY_PREVIEW_FORMAT, str);
+ mPreviewFormat = mPreviewFormatInfo.mm_cam_format;
+ ALOGV("Setting preview format to %d, i =%d, num=%d, hal_format=%d",
+ mPreviewFormat, i, num, mPreviewFormatInfo.Hal_format);
+ return NO_ERROR;
+ } else if ( strTexturesOn ) {
+ // Unknown format string: choose an NV21 fallback (4K padding when stream
+ // textures are on) and still report BAD_VALUE below.
+ mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+ mPreviewFormatInfo.padding = CAMERA_PAD_TO_4K;
+ } else {
+ mPreviewFormatInfo.mm_cam_format = CAMERA_YUV_420_NV21;
+ mPreviewFormatInfo.padding = CAMERA_PAD_TO_WORD;
+ }
+ ALOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setStrTextures(const QCameraParameters& params) {
+    // Toggle stream-textures mode: "on"/"ON" enables it (and disables the
+    // overlay), "off"/"OFF" does the reverse. Anything else leaves state alone.
+    const char *str = params.get("strtextures");
+    const char *prev_str = mParameters.get("strtextures");
+
+    if(str != NULL) {
+        // BUG FIX: prev_str is NULL until "strtextures" has been set at least
+        // once; the old code passed it straight to strcmp (undefined behavior).
+        if(prev_str != NULL && !strcmp(str, prev_str)) {
+            return NO_ERROR;    // unchanged, nothing to do
+        }
+        int str_size = strlen(str);
+        mParameters.set("strtextures", str);
+        if(str_size == 2) {
+            if(!strncmp(str, "on", str_size) || !strncmp(str, "ON", str_size)){
+                ALOGV("Resetting mUseOverlay to false");
+                strTexturesOn = true;
+                mUseOverlay = false;
+            }
+        }else if(str_size == 3){
+            if (!strncmp(str, "off", str_size) || !strncmp(str, "OFF", str_size)) {
+                strTexturesOn = false;
+                mUseOverlay = true;
+            }
+        }
+
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setFlash(const QCameraParameters& params)
+{
+ const char *str = NULL;
+
+ ALOGV("%s: E",__func__);
+ int rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_LED_MODE);
+ if(!rc) {
+ ALOGV("%s:LED FLASH not supported", __func__);
+ return NO_ERROR;
+ }
+
+ // Scene mode overrides the requested flash mode: "hdr" forces flash off,
+ // other non-auto scenes force auto flash; scene "auto" honors the request.
+ const char *sce_str = params.get(QCameraParameters::KEY_SCENE_MODE);
+ if (sce_str != NULL) {
+ if (!strcmp(sce_str, "hdr")) {
+ //Flash In HDR
+ str = QCameraParameters::FLASH_MODE_OFF;
+ mParameters.set(QCameraParameters::KEY_FLASH_MODE, str);
+ }else if(!strcmp(sce_str, "auto")){
+ //Flash Mode in auto scene mode
+ str = params.get(QCameraParameters::KEY_FLASH_MODE);
+ }else{
+ //FLASH in scene Mode except auto, hdr
+ str = QCameraParameters::FLASH_MODE_AUTO;
+ mParameters.set(QCameraParameters::KEY_FLASH_MODE, str);
+ }
+ }else {
+ str = params.get(QCameraParameters::KEY_FLASH_MODE);
+ }
+
+ if (str != NULL) {
+ int32_t value = attr_lookup(flash, sizeof(flash) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(QCameraParameters::KEY_FLASH_MODE, str);
+ bool ret = native_set_parms(MM_CAMERA_PARM_LED_MODE,
+ sizeof(value), (void *)&value);
+ // Remember the LED mode for the ZSL capture path.
+ mLedStatusForZsl = (led_mode_t)value;
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ ALOGE("Invalid flash mode value: %s", (str == NULL) ? "NULL" : str);
+
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setAecAwbLock(const QCameraParameters & params)
+{
+    // Push the AEC (auto-exposure) and AWB (auto-white-balance) lock flags
+    // down to the driver; "true" locks, anything else unlocks.
+    ALOGV("%s : E", __func__);
+    status_t rc = NO_ERROR;
+    int32_t value;
+    const char* str;
+
+    //for AEC lock
+    // BUG FIX: params.get() returns NULL when the key is absent; the old code
+    // passed that straight to strcmp (undefined behavior). Skip absent keys.
+    str = params.get(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK);
+    if (str != NULL) {
+        value = (strcmp(str, "true") == 0)? 1 : 0;
+        mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE_LOCK, str);
+        if (!native_set_parms(MM_CAMERA_PARM_AEC_LOCK, sizeof(int32_t), (void *)(&value)))
+            rc = UNKNOWN_ERROR;
+    }
+
+    //for AWB lock
+    str = params.get(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK);
+    if (str != NULL) {
+        value = (strcmp(str, "true") == 0)? 1 : 0;
+        mParameters.set(QCameraParameters::KEY_AUTO_WHITEBALANCE_LOCK, str);
+        // BUG FIX: the old code overwrote rc here, so a failed AEC-lock call
+        // was silently reported as success; now either failure propagates.
+        if (!native_set_parms(MM_CAMERA_PARM_AWB_LOCK, sizeof(int32_t), (void *)(&value)))
+            rc = UNKNOWN_ERROR;
+    }
+    ALOGV("%s : X", __func__);
+    return rc;
+}
+
+status_t QCameraHardwareInterface::setOverlayFormats(const QCameraParameters& params)
+{
+    // Default overlay format is NV12; 3D mode switches to the NV21
+    // side-by-side stereo variant.
+    int overlayFormat = HAL_PIXEL_FORMAT_YCbCr_420_SP;
+    if(mIs3DModeOn == true) {
+        overlayFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP|HAL_3D_IN_SIDE_BY_SIDE_L_R|HAL_3D_OUT_SIDE_BY_SIDE;
+    }
+    mParameters.set("overlay-format", overlayFormat);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setMCEValue(const QCameraParameters& params)
+{
+ ALOGV("%s",__func__);
+ status_t rc = NO_ERROR;
+ rc = cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_MCE);
+ if(!rc) {
+ ALOGV("MM_CAMERA_PARM_MCE mode is not supported for this sensor");
+ return NO_ERROR;
+ }
+ const char *str = params.get(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT);
+ if (str != NULL) {
+ int value = attr_lookup(mce, sizeof(mce) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ int temp = (int8_t)value;
+ ALOGV("%s: setting MCE value of %s", __FUNCTION__, str);
+ mParameters.set(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT, str);
+
+ native_set_parms(MM_CAMERA_PARM_MCE, sizeof(int8_t), (void *)&temp);
+ return NO_ERROR;
+ }
+ }
+ ALOGE("Invalid MCE value: %s", (str == NULL) ? "NULL" : str);
+
+ // NOTE: unlike most sibling setters, an invalid value is logged but still
+ // reported as NO_ERROR here.
+ return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setHighFrameRate(const QCameraParameters& params)
+{
+    // Apply the requested HFR (high-frame-rate video) level. Changing the
+    // level while preview is running requires a preview restart so the driver
+    // picks up the new rate.
+    bool previewRunning;
+
+    int rc = cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_HFR);
+    if(!rc) {
+        ALOGV("%s: MM_CAMERA_PARM_HFR not supported", __func__);
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if (str != NULL) {
+        int value = attr_lookup(hfr, sizeof(hfr) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mHFRLevel = (int32_t)value;
+            //Check for change in HFR value
+            const char *oldHfr = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+            // BUG FIX: oldHfr is NULL until the key has been set at least
+            // once; the old code passed it straight to strcmp (undefined
+            // behavior). Treat NULL as "changed".
+            if(oldHfr == NULL || strcmp(oldHfr, str)){
+                mParameters.set(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE, str);
+                previewRunning = isPreviewRunning();
+                if(previewRunning == true) {
+                    // Stop, reprogram HFR, then restart preview.
+                    stopPreviewInternal();
+                    mPreviewState = QCAMERA_HAL_PREVIEW_STOPPED;
+                    native_set_parms(MM_CAMERA_PARM_HFR, sizeof(int32_t), (void *)&mHFRLevel);
+                    mPreviewState = QCAMERA_HAL_PREVIEW_START;
+                    if (startPreview2() == NO_ERROR)
+                        mPreviewState = QCAMERA_HAL_PREVIEW_STARTED;
+                    return NO_ERROR;
+                }
+            }
+            native_set_parms(MM_CAMERA_PARM_HFR, sizeof(int32_t), (void *)&mHFRLevel);
+            return NO_ERROR;
+        }
+    }
+    // Invalid values are logged but deliberately tolerated (NO_ERROR).
+    ALOGE("Invalid HFR value: %s", (str == NULL) ? "NULL" : str);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setLensshadeValue(const QCameraParameters& params)
+{
+    // Forward the lens-shading (rolloff) correction mode; silently succeed on
+    // sensors that do not support it.
+    if (!cam_config_is_parm_supported(mCameraId, MM_CAMERA_PARM_ROLLOFF)) {
+        ALOGV("%s:LENS SHADING not supported", __func__);
+        return NO_ERROR;
+    }
+
+    const char *mode = params.get(QCameraParameters::KEY_LENSSHADE);
+    if (mode != NULL) {
+        int lookup = attr_lookup(lensshade,
+                                 sizeof(lensshade) / sizeof(str_map), mode);
+        if (lookup != NOT_FOUND) {
+            int8_t parm = (int8_t)lookup;
+            mParameters.set(QCameraParameters::KEY_LENSSHADE, mode);
+            native_set_parms(MM_CAMERA_PARM_ROLLOFF, sizeof(int8_t), (void *)&parm);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid lensShade value: %s", (mode == NULL) ? "NULL" : mode);
+    return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setFaceDetect(const QCameraParameters& params)
+{
+    // Enable/disable face detection, clamping the requested face count to the
+    // hardware-reported maximum.
+    if(supportsFaceDetection() == false){
+        ALOGI("setFaceDetect support is not available");
+        return NO_ERROR;
+    }
+
+    int requested_faces = params.getInt(QCameraParameters::KEY_MAX_NUM_REQUESTED_FACES);
+    int hardware_supported_faces = mParameters.getInt(CameraParameters::KEY_MAX_NUM_DETECTED_FACES_HW);
+    if (requested_faces > hardware_supported_faces) {
+        requested_faces = hardware_supported_faces;
+    }
+    mParameters.set(QCameraParameters::KEY_MAX_NUM_REQUESTED_FACES, requested_faces);
+    const char *str = params.get(QCameraParameters::KEY_FACE_DETECTION);
+    ALOGV("setFaceDetect: %s", str);
+    if (str != NULL) {
+        int value = attr_lookup(facedetection,
+                                sizeof(facedetection) / sizeof(str_map), str);
+        // BUG FIX: the old code used 'value' without checking the lookup
+        // result, so an unrecognized string programmed NOT_FOUND into the
+        // driver. Mirror the NOT_FOUND handling of setFaceDetection() and
+        // fall through to the BAD_VALUE path instead.
+        if (value != NOT_FOUND) {
+            fd_set_parm_t fd_set_parm;
+            mFaceDetectOn = value;
+            fd_set_parm.fd_mode = value;
+            fd_set_parm.num_fd = requested_faces;
+            ALOGV("%s Face detection value = %d, num_fd = %d",__func__, value, requested_faces);
+            native_set_parms(MM_CAMERA_PARM_FD, sizeof(fd_set_parm_t), (void *)&fd_set_parm);
+            mParameters.set(QCameraParameters::KEY_FACE_DETECTION, str);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+status_t QCameraHardwareInterface::setFaceDetection(const char *str)
+{
+ if(supportsFaceDetection() == false){
+ ALOGV("Face detection is not enabled");
+ return NO_ERROR;
+ }
+ if (str != NULL) {
+ // The face count comes from the already-clamped value stored in mParameters.
+ int requested_faces = mParameters.getInt(QCameraParameters::KEY_MAX_NUM_REQUESTED_FACES);
+ int value = attr_lookup(facedetection,
+ sizeof(facedetection) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ fd_set_parm_t fd_set_parm;
+ // mFaceDetectOn is read by the metadata path; guard the update.
+ mMetaDataWaitLock.lock();
+ mFaceDetectOn = value;
+ mMetaDataWaitLock.unlock();
+ fd_set_parm.fd_mode = value;
+ fd_set_parm.num_fd = requested_faces;
+ ALOGV("%s Face detection value = %d, num_fd = %d",__func__, value, requested_faces);
+ native_set_parms(MM_CAMERA_PARM_FD, sizeof(fd_set_parm_t), (void *)&fd_set_parm);
+ mParameters.set(QCameraParameters::KEY_FACE_DETECTION, str);
+ return NO_ERROR;
+ }
+ }
+ ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+status_t QCameraHardwareInterface::setAEBracket(const QCameraParameters& params)
+{
+    // Configure auto-exposure bracketing / HDR. Scene mode "hdr" forces HDR;
+    // otherwise the KEY_AE_BRACKET_HDR parameter selects the mode.
+    const char *str;
+    if(!cam_config_is_parm_supported(mCameraId,MM_CAMERA_PARM_HDR) || (myMode & CAMERA_ZSL_MODE)) {
+        ALOGV("Parameter HDR is not supported for this sensor/ ZSL mode");
+
+        if (myMode & CAMERA_ZSL_MODE) {
+            ALOGV("In ZSL mode, reset AEBBracket to HDR_OFF mode");
+            exp_bracketing_t temp;
+            memset(&temp, 0, sizeof(temp));
+            mHdrMode = HDR_BRACKETING_OFF;
+            temp.hdr_enable= false;
+            temp.mode = HDR_BRACKETING_OFF;
+            native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+        }
+        return NO_ERROR;
+    }
+
+    const char *str2 = params.get(QCameraParameters::KEY_SCENE_MODE);
+    // BUG FIX: str2 is NULL when KEY_SCENE_MODE is absent; the old code
+    // passed it straight to strcmp (undefined behavior).
+    if(str2 != NULL && !strcmp(str2, "hdr")) {
+        str="HDR";
+    } else {
+        str = params.get(QCameraParameters::KEY_AE_BRACKET_HDR);
+    }
+
+    if (str != NULL) {
+        int value = attr_lookup(hdr_bracket,
+                                sizeof(hdr_bracket) / sizeof(str_map), str);
+        exp_bracketing_t temp;
+        memset(&temp, 0, sizeof(temp));
+        switch (value) {
+            case HDR_MODE:
+                {
+                    // HDR proper: only the mode flag is recorded here.
+                    mHdrMode = HDR_MODE;
+                }
+                break;
+            case EXP_BRACKETING_MODE:
+                {
+                    // Manual exposure bracketing driven by the app-provided
+                    // "capture-burst-exposures" list.
+                    int numFrames = getNumOfSnapshots();
+                    const char *str_val = params.get("capture-burst-exposures");
+                    if ((str_val != NULL) && (strlen(str_val)>0)) {
+                        ALOGV("%s: capture-burst-exposures %s", __FUNCTION__, str_val);
+
+                        mHdrMode = EXP_BRACKETING_MODE;
+                        temp.hdr_enable = false;
+                        temp.mode = EXP_BRACKETING_MODE;
+                        // Cap the burst to the available snapshot buffers.
+                        temp.total_frames = (numFrames > MAX_SNAPSHOT_BUFFERS -2) ? MAX_SNAPSHOT_BUFFERS -2 : numFrames;
+                        temp.total_hal_frames = temp.total_frames;
+                        strlcpy(temp.values, str_val, MAX_EXP_BRACKETING_LENGTH);
+                        ALOGV("%s: setting Exposure Bracketing value of %s, frame (%d)", __FUNCTION__, temp.values, temp.total_hal_frames);
+                        native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+                    }
+                    else {
+                        /* Apps not set capture-burst-exposures, error case fall into bracketing off mode */
+                        ALOGV("%s: capture-burst-exposures not set, back to HDR OFF mode", __FUNCTION__);
+                        mHdrMode = HDR_BRACKETING_OFF;
+                        temp.hdr_enable= false;
+                        temp.mode = HDR_BRACKETING_OFF;
+                        native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+                    }
+                }
+                break;
+            case HDR_BRACKETING_OFF:
+            default:
+                {
+                    mHdrMode = HDR_BRACKETING_OFF;
+                    temp.hdr_enable= false;
+                    temp.mode = HDR_BRACKETING_OFF;
+                    native_set_parms(MM_CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+                }
+                break;
+        }
+
+        /* save the value*/
+        mParameters.set(QCameraParameters::KEY_AE_BRACKET_HDR, str);
+    }
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setCaptureBurstExp()
+{
+    // Seed "capture-burst-exposures" from the persist property, when present.
+    char burst_exp[PROPERTY_VALUE_MAX];
+    memset(burst_exp, 0, sizeof(burst_exp));
+    property_get("persist.capture.burst.exposures", burst_exp, "");
+    // BUG FIX: the old guard was "NULL != burst_exp", which is always true for
+    // a local array; test for a non-empty value instead, so an unset property
+    // does not clobber an application-supplied exposure list with "".
+    if ('\0' != burst_exp[0])
+        mParameters.set("capture-burst-exposures", burst_exp);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setRedeyeReduction(const QCameraParameters& params)
+{
+    // Forward the red-eye reduction mode; no-op on sensors without support.
+    if(supportsRedEyeReduction() == false) {
+        ALOGV("Parameter Redeye Reduction is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *mode = params.get(QCameraParameters::KEY_REDEYE_REDUCTION);
+    if (mode == NULL) {
+        ALOGE("Invalid Redeye Reduction value: %s", "NULL");
+        return BAD_VALUE;
+    }
+    int lookup = attr_lookup(redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map), mode);
+    if (lookup == NOT_FOUND) {
+        ALOGE("Invalid Redeye Reduction value: %s", mode);
+        return BAD_VALUE;
+    }
+    int8_t parm = (int8_t)lookup;
+    ALOGV("%s: setting Redeye Reduction value of %s", __FUNCTION__, mode);
+    mParameters.set(QCameraParameters::KEY_REDEYE_REDUCTION, mode);
+
+    native_set_parms(MM_CAMERA_PARM_REDEYE_REDUCTION, sizeof(int8_t), (void *)&parm);
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setGpsLocation(const QCameraParameters& params)
+{
+    // Copy each EXIF/GPS tag from the incoming params into mParameters; a tag
+    // absent from the request is removed so stale values never reach the
+    // JPEG encoder.
+    static const char * const syncedKeys[] = {
+        QCameraParameters::KEY_GPS_PROCESSING_METHOD,
+        QCameraParameters::KEY_GPS_LATITUDE,
+        QCameraParameters::KEY_GPS_LATITUDE_REF,
+        QCameraParameters::KEY_GPS_LONGITUDE,
+        QCameraParameters::KEY_GPS_LONGITUDE_REF,
+        QCameraParameters::KEY_GPS_ALTITUDE_REF,
+        QCameraParameters::KEY_GPS_ALTITUDE,
+        QCameraParameters::KEY_EXIF_DATETIME,
+        QCameraParameters::KEY_GPS_TIMESTAMP,
+    };
+
+    for (size_t i = 0; i < sizeof(syncedKeys) / sizeof(syncedKeys[0]); ++i) {
+        const char *val = params.get(syncedKeys[i]);
+        if (val) {
+            mParameters.set(syncedKeys[i], val);
+        } else {
+            mParameters.remove(syncedKeys[i]);
+        }
+    }
+
+    // GPS status is only ever set, never cleared (legacy behaviour preserved).
+    const char *status = params.get(QCameraParameters::KEY_GPS_STATUS);
+    if (status) {
+        mParameters.set(QCameraParameters::KEY_GPS_STATUS, status);
+    }
+
+    ALOGV("setGpsLocation X");
+    return NO_ERROR;
+}
+
+status_t QCameraHardwareInterface::setRotation(const QCameraParameters& params)
+{
+ status_t rc = NO_ERROR;
+ int rotation = params.getInt(QCameraParameters::KEY_ROTATION);
+ if (rotation != NOT_FOUND) {
+ if (rotation == 0 || rotation == 90 || rotation == 180
+ || rotation == 270) {
+ mParameters.set(QCameraParameters::KEY_ROTATION, rotation);
+ mRotation = rotation;
+ } else {
+ ALOGE("Invalid rotation value: %d", rotation);
+ rc = BAD_VALUE;
+ }
+ }
+ ALOGV("setRotation");
+ return rc;
+}
+
+// Wavelet-denoise setter. NOTE(review): the entire implementation is compiled
+// out with #if 0, so this function currently ALWAYS returns BAD_VALUE without
+// touching any state -- confirm whether callers tolerate that, or whether the
+// dead code should be removed/re-enabled.
+status_t QCameraHardwareInterface::setDenoise(const QCameraParameters& params)
+{
+#if 0
+    if(!mCfgControl.mm_camera_is_supported(MM_CAMERA_PARM_WAVELET_DENOISE)) {
+        ALOGE("Wavelet Denoise is not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_DENOISE);
+    if (str != NULL) {
+        int value = attr_lookup(denoise,
+            sizeof(denoise) / sizeof(str_map), str);
+        if ((value != NOT_FOUND) &&  (mDenoiseValue != value)) {
+            mDenoiseValue =  value;
+            mParameters.set(QCameraParameters::KEY_DENOISE, str);
+            bool ret = native_set_parms(MM_CAMERA_PARM_WAVELET_DENOISE, sizeof(value),
+                (void *)&value);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+        return NO_ERROR;
+    }
+    ALOGE("Invalid Denoise value: %s", (str == NULL) ? "NULL" : str);
+#endif
+    return BAD_VALUE;
+}
+
+// Store the "orientation" hint ("portrait"/"landscape") used by the camera
+// service to decide whether preview frames and raw pictures are rotated.
+// Absent key is accepted as-is; an unknown value yields BAD_VALUE.
+status_t QCameraHardwareInterface::setOrientation(const QCameraParameters& params)
+{
+    const char *str = params.get("orientation");
+
+    if (str != NULL) {
+        if (strcmp(str, "portrait") == 0 || strcmp(str, "landscape") == 0) {
+            // Camera service needs this to decide if the preview frames and raw
+            // pictures should be rotated.
+            mParameters.set("orientation", str);
+        } else {
+            ALOGE("Invalid orientation value: %s", str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+// Validate the requested picture format against the picture_formats table and
+// store it. Absent key is a no-op; an unknown format yields BAD_VALUE.
+status_t QCameraHardwareInterface::setPictureFormat(const QCameraParameters& params)
+{
+    const char * str = params.get(QCameraParameters::KEY_PICTURE_FORMAT);
+
+    if(str != NULL){
+        int32_t value = attr_lookup(picture_formats,
+            sizeof(picture_formats) / sizeof(str_map), str);
+        if(value != NOT_FOUND){
+            mParameters.set(QCameraParameters::KEY_PICTURE_FORMAT, str);
+        } else {
+            ALOGE("Invalid Picture Format value: %s", str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+// Push the recording-hint value to the backend; when recording is hinted on,
+// also enable continuous autofocus (CAF). DIS and full-size liveshot are
+// re-evaluated since both depend on the recording hint.
+// NOTE(review): comparing an int32_t against `true` works here (hint is 0/1)
+// but is fragile -- confirm callers only pass 0 or 1.
+status_t QCameraHardwareInterface::setRecordingHintValue(const int32_t value)
+{
+    native_set_parms(MM_CAMERA_PARM_RECORDING_HINT, sizeof(value),
+        (void *)&value);
+    if (value == true){
+        native_set_parms(MM_CAMERA_PARM_CAF_ENABLE, sizeof(value),
+            (void *)&value);
+    }
+    setDISMode();
+    setFullLiveshot();
+    return NO_ERROR;
+}
+
+// Parse KEY_RECORDING_HINT from the incoming params, cache it in
+// mRecordingHint and forward it via setRecordingHintValue(). DIS and full
+// liveshot are refreshed on every path, including the no-key path.
+status_t QCameraHardwareInterface::setRecordingHint(const QCameraParameters& params)
+{
+
+    const char * str = params.get(QCameraParameters::KEY_RECORDING_HINT);
+
+    if(str != NULL){
+        int32_t value = attr_lookup(recording_Hints,
+            sizeof(recording_Hints) / sizeof(str_map), str);
+        if(value != NOT_FOUND){
+            mRecordingHint = value;
+            setRecordingHintValue(mRecordingHint);
+            mParameters.set(QCameraParameters::KEY_RECORDING_HINT, str);
+            return NO_ERROR;
+        } else {
+            // NOTE(review): copy-paste message -- this is the recording hint,
+            // not the picture format. Log text should say "Recording Hint".
+            ALOGE("Invalid Picture Format value: %s", str);
+            setDISMode();
+            setFullLiveshot();
+            return BAD_VALUE;
+        }
+    }
+    setDISMode();
+    setFullLiveshot();
+    return NO_ERROR;
+}
+
+// Enable/disable digital image stabilization on the backend.
+status_t QCameraHardwareInterface::setDISMode() {
+    /* Enable DIS only if
+     * - Camcorder mode AND
+     * - DIS property is set AND
+     * - Not in Low power mode. */
+    uint32_t value = mRecordingHint && mDisEnabled
+        && !isLowPowerCamcorder();
+
+    ALOGV("%s DIS is %s value = %d", __func__,
+        value ? "Enabled" : "Disabled", value);
+    native_set_parms(MM_CAMERA_PARM_DIS_ENABLE, sizeof(value),
+        (void *)&value);
+    return NO_ERROR;
+}
+
+// Enable/disable full-resolution liveshot (snapshot during video) on the
+// backend, trading fps for snapshot resolution.
+status_t QCameraHardwareInterface::setFullLiveshot()
+{
+    /* Enable full size liveshot only if
+     * - Camcorder mode AND
+     * - Full size liveshot is enabled. */
+    uint32_t value = mRecordingHint && mFullLiveshotEnabled
+        && !isLowPowerCamcorder();
+
+    if (((mDimension.picture_width == mVideoWidth) &&
+         (mDimension.picture_height == mVideoHeight))) {
+        /* If video size matches the live snapshot size
+         * turn off full size liveshot to get higher fps. */
+        value = 0;
+    }
+
+    ALOGV("%s Full size liveshot %s value = %d", __func__,
+        value ? "Enabled" : "Disabled", value);
+    native_set_parms(MM_CAMERA_PARM_FULL_LIVESHOT, sizeof(value),
+        (void *)&value);
+    return NO_ERROR;
+}
+
+
+// Return the current autofocus mode. NOTE(review): `params` is unused; the
+// value comes entirely from the cached mFocusMode member.
+isp3a_af_mode_t QCameraHardwareInterface::getAutoFocusMode(
+    const QCameraParameters& params)
+{
+    isp3a_af_mode_t afMode = AF_MODE_MAX;
+    afMode = (isp3a_af_mode_t)mFocusMode;
+    return afMode;
+}
+
+// Fetch the currently-configured picture dimensions from mParameters.
+void QCameraHardwareInterface::getPictureSize(int *picture_width,
+                                              int *picture_height) const
+{
+    mParameters.getPictureSize(picture_width, picture_height);
+}
+
+// Fetch the currently-configured preview dimensions from mParameters.
+void QCameraHardwareInterface::getPreviewSize(int *preview_width,
+                                              int *preview_height) const
+{
+    mParameters.getPreviewSize(preview_width, preview_height);
+}
+
+// Map the HAL preview-format string to the mm-camera format enum via the
+// preview_format_info_list table; defaults to CAMERA_YUV_420_NV21 when the
+// string is unknown. ("foramt" is a pre-existing typo kept as-is.)
+cam_format_t QCameraHardwareInterface::getPreviewFormat() const
+{
+    cam_format_t foramt = CAMERA_YUV_420_NV21;
+    const char *str = mParameters.getPreviewFormat();
+    int32_t value = attr_lookup(preview_formats,
+                                sizeof(preview_formats)/sizeof(str_map),
+                                str);
+
+    if(value != NOT_FOUND) {
+        int num = sizeof(preview_format_info_list)/sizeof(preview_format_info_t);
+        int i;
+        for (i = 0; i < num; i++) {
+            if (preview_format_info_list[i].Hal_format == value) {
+                foramt = preview_format_info_list[i].mm_cam_format;
+                break;
+            }
+        }
+    }
+
+    return foramt;
+}
+
+// Padding requirement of the current preview format.
+cam_pad_format_t QCameraHardwareInterface::getPreviewPadding() const
+{
+    return mPreviewFormatInfo.padding;
+}
+
+// Cached JPEG encode quality.
+int QCameraHardwareInterface::getJpegQuality() const
+{
+    return mJpegQuality;
+}
+
+// Number of snapshots per shutter press. A non-zero
+// persist.camera.snapshot.number property overrides the
+// "num-snaps-per-shutter" parameter (debug/override hook).
+int QCameraHardwareInterface::getNumOfSnapshots(void) const
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.snapshot.number", prop, "0");
+    ALOGV("%s: prop enable/disable = %d", __func__, atoi(prop));
+    if (atoi(prop)) {
+        ALOGV("%s: Reading maximum no of snapshots = %d"
+              "from properties", __func__, atoi(prop));
+        return atoi(prop);
+    } else {
+        return mParameters.getInt("num-snaps-per-shutter");
+    }
+}
+
+// Same as above, but reads the count from the caller-supplied params instead
+// of the cached mParameters.
+int QCameraHardwareInterface::getNumOfSnapshots(const QCameraParameters& params)
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.snapshot.number", prop, "0");
+    ALOGV("%s: prop enable/disable = %d", __func__, atoi(prop));
+    if (atoi(prop)) {
+        ALOGV("%s: Reading maximum no of snapshots = %d"
+              "from properties", __func__, atoi(prop));
+        return atoi(prop);
+    } else {
+        return params.getInt("num-snaps-per-shutter");
+    }
+
+}
+
+// Look up a thumbnail width/height matching the given aspect ratio in the
+// thumbnail_sizes table. Returns NO_ERROR on a match (outputs filled),
+// BAD_VALUE when no entry matches (outputs untouched).
+int QCameraHardwareInterface::
+getThumbSizesFromAspectRatio(uint32_t aspect_ratio,
+                             int *picture_width,
+                             int *picture_height)
+{
+    for(unsigned int i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ){
+        if(thumbnail_sizes[i].aspect_ratio == aspect_ratio)
+        {
+            *picture_width = thumbnail_sizes[i].width;
+            *picture_height = thumbnail_sizes[i].height;
+            return NO_ERROR;
+        }
+    }
+
+    return BAD_VALUE;
+}
+
+// True when the configured picture format is the RAW pixel format.
+bool QCameraHardwareInterface::isRawSnapshot()
+{
+    const char *format = mParameters.getPictureFormat();
+    if( format!= NULL &&
+        !strcmp(format, QCameraParameters::PIXEL_FORMAT_RAW)){
+        return true;
+    }
+    else{
+        return false;
+    }
+}
+
+// Build the supported-preview-size table: start from default_preview_sizes
+// (assumed sorted largest-first) and skip entries larger than the sensor's
+// maximum preview size; the first fitting entry becomes the preferred
+// preview size for video.
+status_t QCameraHardwareInterface::setPreviewSizeTable(void)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* preview_size_table;
+    int preview_table_size;
+    int i = 0;
+    char str[10] = {0};
+
+    /* Initialize table with default values */
+    preview_size_table = default_preview_sizes;
+    preview_table_size = preview_sizes_count;
+
+
+    /* Get maximum preview size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_PREVIEW_SIZE, &dim);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Failure getting Max Preview Size supported by camera",
+              __func__);
+        goto end;
+    }
+
+    ALOGV("%s: Max Preview Sizes Supported: %d X %d", __func__,
+          dim.width, dim.height);
+
+    // NOTE(review): if no entry fits, the loop leaves preview_size_table one
+    // past the end of the array and the sprintf below reads it -- confirm the
+    // default table always contains an entry within sensor limits.
+    for (i = 0; i < preview_table_size; i++) {
+        if ((preview_size_table->width <= dim.width) &&
+            (preview_size_table->height <= dim.height)) {
+            ALOGV("%s: Camera Preview Size Table "
+                  "Max width: %d height %d table_size: %d",
+                  __func__, preview_size_table->width,
+                  preview_size_table->height, preview_table_size - i);
+            break;
+        }
+        preview_size_table++;
+    }
+    //set preferred preview size to maximum preview size
+    sprintf(str, "%dx%d", preview_size_table->width, preview_size_table->height);
+    mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, str);
+    ALOGV("KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO = %s", str);
+
+end:
+    /* Save the table in global member*/
+    mPreviewSizes = preview_size_table;
+    mPreviewSizeCount = preview_table_size - i;
+
+    return ret;
+}
+
+// Build the supported-picture-size table by filtering default_picture_sizes
+// against the sensor's maximum picture size into a freshly-malloc'd array
+// (mPictureSizes). On backend-query failure the defaults are used verbatim.
+status_t QCameraHardwareInterface::setPictureSizeTable(void)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* picture_size_table;
+    int picture_table_size;
+    int i = 0, count = 0;
+
+    /* Initialize table with default values */
+    picture_table_size = sizeof(default_picture_sizes)/
+        sizeof(default_picture_sizes[0]);
+    picture_size_table = default_picture_sizes;
+    mPictureSizes =
+        ( struct camera_size_type *)malloc(picture_table_size *
+                                           sizeof(struct camera_size_type));
+    // NOTE(review): on malloc failure ret is still NO_ERROR, so the fallback
+    // at `end:` does NOT restore the defaults and mPictureSizes stays NULL
+    // with mPictureSizeCount unset -- likely should set ret = NO_MEMORY here.
+    if (mPictureSizes == NULL) {
+        ALOGE("%s: Failre allocating memory to store picture size table",__func__);
+        goto end;
+    }
+
+    /* Get maximum picture size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_PICTURE_SIZE, &dim);
+    if (ret != NO_ERROR) {
+        ALOGE("%s: Failure getting Max Picture Size supported by camera",
+              __func__);
+        ret = NO_MEMORY;
+        free(mPictureSizes);
+        mPictureSizes = NULL;
+        goto end;
+    }
+
+    ALOGV("%s: Max Picture Sizes Supported: %d X %d", __func__,
+          dim.width, dim.height);
+
+    for (i = 0; i < picture_table_size; i++) {
+        /* We'll store those dimensions whose width AND height
+           are less than or equal to maximum supported */
+        if ((picture_size_table->width <= dim.width) &&
+            (picture_size_table->height <= dim.height)) {
+            ALOGV("%s: Camera Picture Size Table "
+                  "Max width: %d height %d table_size: %d",
+                  __func__, picture_size_table->width,
+                  picture_size_table->height, count+1);
+            mPictureSizes[count].height = picture_size_table->height;
+            mPictureSizes[count].width = picture_size_table->width;
+            count++;
+        }
+        picture_size_table++;
+    }
+    mPictureSizeCount = count;
+
+end:
+    /* In case of error, we use default picture sizes */
+    if (ret != NO_ERROR) {
+        mPictureSizes = default_picture_sizes;
+        mPictureSizeCount = picture_table_size;
+    }
+    return ret;
+}
+
+// Build the supported-video-size table by filtering default_video_sizes
+// against the sensor's maximum video size into a malloc'd array (mVideoSizes).
+// Unlike setPictureSizeTable there is no fallback to the defaults on failure.
+status_t QCameraHardwareInterface::setVideoSizeTable(void)
+{
+    status_t ret = NO_ERROR;
+    mm_camera_dimension_t dim;
+    struct camera_size_type* video_size_table;
+    int video_table_size;
+    int i = 0, count = 0;
+    ALOGV("%s: E", __func__);
+
+    /* Initialize table with default values */
+    video_table_size = video_sizes_count;
+    video_size_table = default_video_sizes;
+    mVideoSizes =
+        (struct camera_size_type *)malloc(video_table_size *
+                                          sizeof(struct camera_size_type));
+    if(mVideoSizes == NULL) {
+        ALOGE("%s: error allocating memory to store video size table",__func__);
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    /* Get maximum video size supported by sensor*/
+    memset(&dim, 0, sizeof(mm_camera_dimension_t));
+    ret = cam_config_get_parm(mCameraId,
+                              MM_CAMERA_PARM_MAX_VIDEO_SIZE, &dim);
+    if(ret != NO_ERROR) {
+        ALOGE("%s: error getting Max Video Size supported by camera",
+              __func__);
+        // NOTE(review): ret is assigned NO_MEMORY and then immediately
+        // overwritten with BAD_VALUE -- one of the two is dead code.
+        ret = NO_MEMORY;
+        free(mVideoSizes);
+        mVideoSizes = NULL;
+        ret = BAD_VALUE;
+        goto end;
+    }
+
+    ALOGV("%s: Max Video Size Supported: %d X %d", __func__,
+          dim.width, dim.height);
+
+    for(i=0; i < video_table_size; i++) {
+        /* We'll store those dimensions whose width AND height
+           are less than or equal to maximum supported */
+        if((video_size_table->width <= dim.width) &&
+           (video_size_table->height <= dim.height)) {
+            ALOGV("%s: Supported Video Size [%d] = %dx%d", __func__, count, video_size_table->width,
+                  video_size_table->height);
+            mVideoSizes[count].height = video_size_table->height;
+            mVideoSizes[count].width = video_size_table->width;
+            count++;
+        }
+        video_size_table++;
+    }
+    mVideoSizeCount = count;
+
+end:
+    ALOGV("%s: X", __func__);
+    return ret;
+}
+
+// Release the malloc'd video size table.
+// NOTE(review): mVideoSizes is freed but not set to NULL, leaving a dangling
+// pointer; a second call would double-free. Confirm call sites.
+void QCameraHardwareInterface::freeVideoSizeTable(void)
+{
+    if(mVideoSizes != NULL)
+    {
+        free(mVideoSizes);
+    }
+    mVideoSizeCount = 0;
+}
+
+
+// Release the picture size table, unless it aliases the static default table
+// (the fallback path in setPictureSizeTable), which must not be freed.
+void QCameraHardwareInterface::freePictureTable(void)
+{
+    /* If we couldn't allocate memory to store picture table
+       we use the picture table pointer to point to default
+       picture table array. In that case we cannot free it.*/
+    if ((mPictureSizes != default_picture_sizes) && mPictureSizes) {
+        free(mPictureSizes);
+    }
+}
+
+// Enable/disable preview histogram statistics. On enable: allocate three
+// app-side camera_memory buffers plus three server-side buffers (ION or
+// pmem, per USE_ION) and register the latter with the backend. On disable:
+// unregister and release everything. No-op when the state is unchanged.
+// NOTE(review): the early `return -1`/`return(-1)` paths leak buffers
+// allocated in earlier loop iterations, and mix raw -1 with status_t codes.
+status_t QCameraHardwareInterface::setHistogram(int histogram_en)
+{
+    ALOGV("setHistogram: E");
+    if(mStatsOn == histogram_en) {
+        return NO_ERROR;
+    }
+
+    mSendData = histogram_en;
+    mStatsOn = histogram_en;
+    mCurrentHisto = -1;
+    mStatSize = sizeof(uint32_t)* HISTOGRAM_STATS_SIZE;
+
+    if (histogram_en == QCAMERA_PARM_ENABLE) {
+        /*Currently the Ashmem is multiplying the buffer size with total number
+          of buffers and page aligning. This causes a crash in JNI as each buffer
+          individually expected to be page aligned */
+        int page_size_minus_1 = getpagesize() - 1;
+        int statSize = sizeof (camera_preview_histogram_info );
+        int32_t mAlignedStatSize = ((statSize + page_size_minus_1) & (~page_size_minus_1));
+#if 0
+        mStatHeap =
+            new AshmemPool(mAlignedStatSize, 3, statSize, "stat");
+        if (!mStatHeap->initialized()) {
+            ALOGE("Stat Heap X failed ");
+            mStatHeap.clear();
+            mStatHeap = NULL;
+            return UNKNOWN_ERROR;
+        }
+#endif
+        // Triple-buffered stats: client-visible mapping + server-side buffer.
+        for(int cnt = 0; cnt<3; cnt++) {
+            mStatsMapped[cnt]=mGetMemory(-1, mStatSize, 1, mCallbackCookie);
+            if(mStatsMapped[cnt] == NULL) {
+                ALOGE("Failed to get camera memory for stats heap index: %d", cnt);
+                return(-1);
+            } else {
+                ALOGV("Received following info for stats mapped data:%p,handle:%p, size:%d,release:%p",
+                      mStatsMapped[cnt]->data ,mStatsMapped[cnt]->handle, mStatsMapped[cnt]->size, mStatsMapped[cnt]->release);
+            }
+            mHistServer.size = sizeof(camera_preview_histogram_info);
+#ifdef USE_ION
+            if(allocate_ion_memory(&mHistServer, cnt, ION_CP_MM_HEAP_ID) < 0) {
+                ALOGE("%s ION alloc failed\n", __func__);
+                return -1;
+            }
+#else
+            mHistServer.fd[cnt] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+            if(mHistServer.fd[cnt] <= 0) {
+                ALOGE("%s: no pmem for frame %d", __func__, cnt);
+                return -1;
+            }
+#endif
+            mHistServer.camera_memory[cnt]=mGetMemory(mHistServer.fd[cnt],mHistServer.size, 1, mCallbackCookie);
+            if(mHistServer.camera_memory[cnt] == NULL) {
+                ALOGE("Failed to get camera memory for server side histogram index: %d", cnt);
+                return(-1);
+            } else {
+                ALOGV("Received following info for server side histogram data:%p,handle:%p, size:%d,release:%p",
+                      mHistServer.camera_memory[cnt]->data ,mHistServer.camera_memory[cnt]->handle,
+                      mHistServer.camera_memory[cnt]->size, mHistServer.camera_memory[cnt]->release);
+            }
+            /*Register buffer at back-end*/
+            if (NO_ERROR != sendMappingBuf(0, cnt, mHistServer.fd[cnt],
+                                           mHistServer.size, mCameraId,
+                                           CAM_SOCK_MSG_TYPE_HIST_MAPPING)) {
+                ALOGE("%s could not send buffer to back-end\n", __func__);
+            }
+        }
+    }
+    ALOGV("Setting histogram = %d", histogram_en);
+    native_set_parms(MM_CAMERA_PARM_HISTOGRAM, sizeof(int), &histogram_en);
+    if(histogram_en == QCAMERA_PARM_DISABLE)
+    {
+        //release memory
+        for(int i=0; i<3; i++){
+            if(mStatsMapped[i] != NULL) {
+                mStatsMapped[i]->release(mStatsMapped[i]);
+            }
+            /*Unregister buffer at back-end */
+            if (NO_ERROR != sendUnMappingBuf(0, i, mCameraId, CAM_SOCK_MSG_TYPE_HIST_UNMAPPING)) {
+                ALOGE("%s could not unregister buffer from back-end\n", __func__);
+            }
+            if(mHistServer.camera_memory[i] != NULL) {
+                mHistServer.camera_memory[i]->release(mHistServer.camera_memory[i]);
+            }
+            close(mHistServer.fd[i]);
+#ifdef USE_ION
+            deallocate_ion_memory(&mHistServer, i);
+#endif
+        }
+    }
+    return NO_ERROR;
+}
+
+// Copy the "capture-burst-retroactive" (ZSL look-back) count from the
+// incoming params into mParameters. Absent key is a no-op.
+status_t QCameraHardwareInterface::setZSLBurstLookBack(const QCameraParameters& params)
+{
+    const char *v = params.get("capture-burst-retroactive");
+    if (v) {
+        int look_back = atoi(v);
+        ALOGV("%s: look_back =%d", __func__, look_back);
+        mParameters.set("capture-burst-retroactive", look_back);
+    }
+    return NO_ERROR;
+}
+
+// Validate and cache the ZSL burst interval; resets to the default first so
+// an absent key restores default behavior. Out-of-range -> BAD_VALUE.
+status_t QCameraHardwareInterface::setZSLBurstInterval(const QCameraParameters& params)
+{
+    mZslInterval = BURST_INTREVAL_DEFAULT;
+    const char *v = params.get("capture-burst-interval");
+    if (v) {
+        int interval = atoi(v);
+        ALOGV("%s: Interval =%d", __func__, interval);
+        if(interval < BURST_INTREVAL_MIN ||interval > BURST_INTREVAL_MAX ) {
+            return BAD_VALUE;
+        }
+        mZslInterval = interval;
+    }
+    return NO_ERROR;
+}
+
+// Effective ZSL burst interval: the cached value, or the
+// persist.camera.zsl.interval property when still at the default.
+int QCameraHardwareInterface::getZSLBurstInterval( void )
+{
+    int val;
+
+    if (mZslInterval == BURST_INTREVAL_DEFAULT) {
+        char prop[PROPERTY_VALUE_MAX];
+        memset(prop, 0, sizeof(prop));
+        property_get("persist.camera.zsl.interval", prop, "1");
+        val = atoi(prop);
+        ALOGV("%s: prop interval = %d", __func__, val);
+    } else {
+        val = mZslInterval;
+    }
+    return val;
+}
+
+
+// ZSL queue depth, overridable via persist.camera.zsl.queuedepth (default 2).
+int QCameraHardwareInterface::getZSLQueueDepth(void) const
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.zsl.queuedepth", prop, "2");
+    ALOGV("%s: prop = %d", __func__, atoi(prop));
+    return atoi(prop);
+}
+
+// ZSL back-look frame count: persist.camera.zsl.backlookcnt property wins
+// when non-zero, otherwise the "capture-burst-retroactive" parameter.
+int QCameraHardwareInterface::getZSLBackLookCount(void) const
+{
+    int look_back;
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.zsl.backlookcnt", prop, "0");
+    ALOGV("%s: prop = %d", __func__, atoi(prop));
+    look_back = atoi(prop);
+    if (look_back == 0 ) {
+        look_back = mParameters.getInt("capture-burst-retroactive");
+        ALOGV("%s: look_back = %d", __func__, look_back);
+    }
+    return look_back;
+}
+
+// Decide whether the flash should fire for a ZSL capture: forced on for
+// LED_MODE_ON; for LED_MODE_AUTO, query the backend's low-light/AEC decision.
+// Non-ZSL modes and other LED modes return false.
+bool QCameraHardwareInterface::getFlashCondition(void)
+{
+    int32_t rc = 0;
+    bool flash_cond = false;
+    aec_info_for_flash_t lowLightForZSL;
+
+    lowLightForZSL.aec_index_for_zsl = 0;
+    lowLightForZSL.zsl_flash_enable = 0;
+
+    if(myMode & CAMERA_ZSL_MODE){
+        switch(mLedStatusForZsl) {
+        case LED_MODE_ON:
+            flash_cond = true;
+            break;
+        case LED_MODE_AUTO:
+            rc = cam_config_get_parm(mCameraId,
+                                     MM_CAMERA_GET_PARM_LOW_LIGHT_FOR_ZSL, &lowLightForZSL);
+            if(MM_CAMERA_OK == rc) {
+                if(lowLightForZSL.zsl_flash_enable != 0)
+                    flash_cond = true;
+                else
+                    flash_cond = false;
+            }
+            else
+                ALOGE("%s: Failed to get lowLightForZSL, rc %d", __func__, rc);
+            break;
+        default:
+            break;
+        }
+    }
+
+    ALOGV("%s: myMode %d, flash mode %d, flash condition %d",
+          __func__, myMode, mLedStatusForZsl, flash_cond);
+    return flash_cond;
+}
+
+//EXIF functions
+// Zero the whole EXIF tag table and reset the entry count.
+void QCameraHardwareInterface::deinitExifData()
+{
+    ALOGV("Clearing EXIF data");
+    for(int i=0; i<MAX_EXIF_TABLE_ENTRIES; i++)
+    {
+        //clear all data
+        memset(&mExifData[i], 0x00, sizeof(exif_tags_info_t));
+    }
+    mExifTableNumEntries = 0;
+}
+
+// Append one tag to the EXIF table, storing `data` in the union member that
+// matches (type, count). Silently drops the tag when the table is full.
+void QCameraHardwareInterface::addExifTag(exif_tag_id_t tagid, exif_tag_type_t type,
+                                          uint32_t count, uint8_t copy, void *data) {
+
+    if(mExifTableNumEntries >= MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("%s: Number of entries exceeded limit", __func__);
+        return;
+    }
+    int index = mExifTableNumEntries;
+    mExifData[index].tag_id = tagid;
+    mExifData[index].tag_entry.type = type;
+    mExifData[index].tag_entry.count = count;
+    mExifData[index].tag_entry.copy = copy;
+    // NOTE(review): the first RATIONAL branch is a standalone `if` while the
+    // rest form an else-if chain -- works for current inputs, but the
+    // asymmetry looks accidental; confirm before restructuring.
+    if((type == EXIF_RATIONAL) && (count > 1))
+        mExifData[index].tag_entry.data._rats = (rat_t *)data;
+    if((type == EXIF_RATIONAL) && (count == 1))
+        mExifData[index].tag_entry.data._rat = *(rat_t *)data;
+    else if(type == EXIF_ASCII)
+        mExifData[index].tag_entry.data._ascii = (char *)data;
+    else if(type == EXIF_BYTE)
+        mExifData[index].tag_entry.data._byte = *(uint8_t *)data;
+    else if((type == EXIF_SHORT) && (count > 1))
+        mExifData[index].tag_entry.data._shorts = (uint16_t *)data;
+    else if((type == EXIF_SHORT) && (count == 1))
+        mExifData[index].tag_entry.data._short = *(uint16_t *)data;
+    // Increase number of entries
+    mExifTableNumEntries++;
+}
+
+// Build a rat_t (EXIF rational) from numerator/denominator.
+rat_t getRational(int num, int denom)
+{
+    rat_t temp = {num, denom};
+    return temp;
+}
+
+// Populate the EXIF tag table from the values gathered in mExifValues:
+// date/time, focal length, ISO, and (when flagged present) the GPS group.
+// Must run after setExifTags()/setExifTagsGPS() have filled mExifValues.
+void QCameraHardwareInterface::initExifData(){
+    if(mExifValues.dateTime) {
+        addExifTag(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                   20, 1, (void *)mExifValues.dateTime);
+        addExifTag(EXIFTAGID_EXIF_DATE_TIME_DIGITIZED, EXIF_ASCII,
+                   20, 1, (void *)mExifValues.dateTime);
+    }
+    addExifTag(EXIFTAGID_FOCAL_LENGTH, EXIF_RATIONAL, 1, 1, (void *)&(mExifValues.focalLength));
+    addExifTag(EXIFTAGID_ISO_SPEED_RATING,EXIF_SHORT,1,1,(void *)&(mExifValues.isoSpeed));
+
+    if(mExifValues.mGpsProcess) {
+        addExifTag(EXIFTAGID_GPS_PROCESSINGMETHOD, EXIF_ASCII,
+                   EXIF_ASCII_PREFIX_SIZE + strlen(mExifValues.gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE) + 1,
+                   1, (void *)mExifValues.gpsProcessingMethod);
+    }
+
+    if(mExifValues.mLatitude) {
+        addExifTag(EXIFTAGID_GPS_LATITUDE, EXIF_RATIONAL, 3, 1, (void *)mExifValues.latitude);
+
+        if(mExifValues.latRef) {
+            addExifTag(EXIFTAGID_GPS_LATITUDE_REF, EXIF_ASCII, 2,
+                       1, (void *)mExifValues.latRef);
+        }
+    }
+
+    if(mExifValues.mLongitude) {
+        addExifTag(EXIFTAGID_GPS_LONGITUDE, EXIF_RATIONAL, 3, 1, (void *)mExifValues.longitude);
+
+        if(mExifValues.lonRef) {
+            addExifTag(EXIFTAGID_GPS_LONGITUDE_REF, EXIF_ASCII, 2,
+                       1, (void *)mExifValues.lonRef);
+        }
+    }
+
+    if(mExifValues.mAltitude) {
+        addExifTag(EXIFTAGID_GPS_ALTITUDE, EXIF_RATIONAL, 1,
+                   1, (void *)&(mExifValues.altitude));
+
+        addExifTag(EXIFTAGID_GPS_ALTITUDE_REF, EXIF_BYTE, 1, 1, (void *)&mExifValues.mAltitude_ref);
+    }
+
+    if(mExifValues.mTimeStamp) {
+        time_t unixTime;
+        struct tm *UTCTimestamp;
+
+        // Convert the app-supplied epoch seconds to a UTC date stamp and an
+        // hour/min/sec rational triple, per the EXIF GPS IFD layout.
+        unixTime = (time_t)mExifValues.mGPSTimestamp;
+        UTCTimestamp = gmtime(&unixTime);
+
+        strftime(mExifValues.gpsDateStamp, sizeof(mExifValues.gpsDateStamp), "%Y:%m:%d", UTCTimestamp);
+        addExifTag(EXIFTAGID_GPS_DATESTAMP, EXIF_ASCII,
+                   strlen(mExifValues.gpsDateStamp)+1 , 1, (void *)mExifValues.gpsDateStamp);
+
+        mExifValues.gpsTimeStamp[0] = getRational(UTCTimestamp->tm_hour, 1);
+        mExifValues.gpsTimeStamp[1] = getRational(UTCTimestamp->tm_min, 1);
+        mExifValues.gpsTimeStamp[2] = getRational(UTCTimestamp->tm_sec, 1);
+
+        addExifTag(EXIFTAGID_GPS_TIMESTAMP, EXIF_RATIONAL,
+                   3, 1, (void *)mExifValues.gpsTimeStamp);
+        ALOGV("EXIFTAGID_GPS_TIMESTAMP set");
+    }
+
+}
+
+//Add all exif tags in this function
+// Gather EXIF source values into mExifValues: datetime, focal length, ISO
+// and exposure time (queried from the backend), then the GPS group.
+void QCameraHardwareInterface::setExifTags()
+{
+    const char *str;
+
+    //set TimeStamp
+    // NOTE(review): the dateTime copied from the app parameter here is
+    // unconditionally overwritten with the system clock below -- confirm
+    // which source is intended to win.
+    str = mParameters.get(QCameraParameters::KEY_EXIF_DATETIME);
+    if(str != NULL) {
+        strncpy(mExifValues.dateTime, str, 19);
+        mExifValues.dateTime[19] = '\0';
+    }
+
+    //Set focal length
+    int focalLengthValue = (int) (mParameters.getFloat(
+        QCameraParameters::KEY_FOCAL_LENGTH) * FOCAL_LENGTH_DECIMAL_PRECISION);
+
+    mExifValues.focalLength = getRational(focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISION);
+
+    // NOTE(review): queries MM_CAMERA_PARM_FOCAL_LENGTH but reads gain and
+    // exposure-time fields from a focus_distances_info_t -- verify the parm
+    // id / struct pairing against the mm-camera interface.
+    focus_distances_info_t focusDistances;
+    status_t rc = NO_ERROR;
+    rc = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_FOCAL_LENGTH,(void *)&focusDistances);
+    if (rc == MM_CAMERA_OK){
+        uint16_t temp1;
+        rat_t temp;
+        if(mIsoValue == 0) // ISO is auto
+        {
+            temp1 = (uint16_t)(focusDistances.real_gain + 0.5)*100;
+            mExifValues.isoSpeed = temp1;
+            ALOGV("The new ISO value is %d", temp1);
+        }
+        else{
+            temp1 = iso_speed_values[mIsoValue];
+            mExifValues.isoSpeed = temp1;
+            ALOGV("else The new ISO value is %d", temp1);
+        }
+
+        if(focusDistances.exp_time <= 0) // avoid zero-divide problem
+            focusDistances.exp_time = 0.01668; // expoure time will be 1/60 s
+
+        // Express exposure time as 1/temp2 seconds.
+        uint16_t temp2 = (uint16_t)(focusDistances.exp_time * 100000);
+        temp2 = (uint16_t)(100000 / temp2);
+        temp.num = 1;
+        temp.denom = temp2;
+        memcpy(&mExifValues.exposure_time, &temp, sizeof(mExifValues.exposure_time));
+        addExifTag(EXIFTAGID_EXPOSURE_TIME, EXIF_RATIONAL, 1, 1, (void *)&mExifValues.exposure_time);
+
+        // NOTE(review): %f with an integer argument is undefined behavior in
+        // printf-style formatting -- should be %d (or cast temp2 to double).
+        ALOGV(" The exposure value is %f", temp2);
+    }
+    //get time and date from system
+    time_t rawtime;
+    struct tm * timeinfo;
+    time(&rawtime);
+    timeinfo = localtime (&rawtime);
+    //Write datetime according to EXIF Spec
+    //"YYYY:MM:DD HH:MM:SS" (20 chars including \0)
+    snprintf(mExifValues.dateTime, 20, "%04d:%02d:%02d %02d:%02d:%02d",
+             timeinfo->tm_year + 1900, timeinfo->tm_mon + 1,
+             timeinfo->tm_mday, timeinfo->tm_hour,
+             timeinfo->tm_min, timeinfo->tm_sec);
+    //set gps tags
+    setExifTagsGPS();
+}
+
+// Gather GPS-related EXIF values from mParameters into mExifValues, setting
+// the corresponding m* presence flags that initExifData() later consults.
+void QCameraHardwareInterface::setExifTagsGPS()
+{
+    const char *str = NULL;
+
+    //Set GPS processing method
+    // Stored as the EXIF "undefined-text" form: 8-byte encoding prefix
+    // followed by the method string.
+    str = mParameters.get(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+    if(str != NULL) {
+        memcpy(mExifValues.gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+        strncpy(mExifValues.gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str,
+                GPS_PROCESSING_METHOD_SIZE - 1);
+        mExifValues.gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE-1] = '\0';
+        ALOGV("EXIFTAGID_GPS_PROCESSINGMETHOD = %s %s", mExifValues.gpsProcessingMethod,
+              mExifValues.gpsProcessingMethod+8);
+        mExifValues.mGpsProcess  = true;
+    }else{
+        mExifValues.mGpsProcess = false;
+    }
+    str = NULL;
+
+    //Set Latitude
+    str = mParameters.get(QCameraParameters::KEY_GPS_LATITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, mExifValues.latitude);
+        ALOGV("EXIFTAGID_GPS_LATITUDE = %s", str);
+
+        //set Latitude Ref
+        // Sign selects hemisphere; magnitude was taken by parseGPSCoordinate.
+        float latitudeValue = mParameters.getFloat(QCameraParameters::KEY_GPS_LATITUDE);
+        if(latitudeValue < 0.0f) {
+            mExifValues.latRef[0] = 'S';
+        } else {
+            mExifValues.latRef[0] = 'N';
+        }
+        mExifValues.latRef[1] = '\0';
+        mExifValues.mLatitude = true;
+        mParameters.set(QCameraParameters::KEY_GPS_LATITUDE_REF,mExifValues.latRef);
+        ALOGV("EXIFTAGID_GPS_LATITUDE_REF = %s", mExifValues.latRef);
+    }else{
+        mExifValues.mLatitude = false;
+    }
+
+    //set Longitude
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_LONGITUDE);
+    if(str != NULL) {
+        parseGPSCoordinate(str, mExifValues.longitude);
+        ALOGV("EXIFTAGID_GPS_LONGITUDE = %s", str);
+
+        //set Longitude Ref
+        float longitudeValue = mParameters.getFloat(QCameraParameters::KEY_GPS_LONGITUDE);
+        if(longitudeValue < 0.0f) {
+            mExifValues.lonRef[0] = 'W';
+        } else {
+            mExifValues.lonRef[0] = 'E';
+        }
+        mExifValues.lonRef[1] = '\0';
+        mExifValues.mLongitude = true;
+        ALOGV("EXIFTAGID_GPS_LONGITUDE_REF = %s", mExifValues.lonRef);
+        mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE_REF, mExifValues.lonRef);
+    }else{
+        mExifValues.mLongitude = false;
+    }
+
+    //set Altitude
+    str = mParameters.get(QCameraParameters::KEY_GPS_ALTITUDE);
+    if(str != NULL) {
+        double value = atof(str);
+        mExifValues.mAltitude_ref = 0;
+        // EXIF altitude ref: 0 = above sea level, 1 = below; value is stored
+        // as a positive rational in millimeter-of-meter precision (x1000).
+        if(value < 0){
+            mExifValues.mAltitude_ref = 1;
+            value = -value;
+        }
+        mExifValues.altitude = getRational(value*1000, 1000);
+        mExifValues.mAltitude = true;
+        //set AltitudeRef
+        mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE_REF, mExifValues.mAltitude_ref);
+        ALOGV("EXIFTAGID_GPS_ALTITUDE = %f", value);
+    }else{
+        mExifValues.mAltitude = false;
+    }
+
+    //set Gps TimeStamp
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_TIMESTAMP);
+    if(str != NULL) {
+        mExifValues.mTimeStamp = true;
+        mExifValues.mGPSTimestamp = atol(str);
+    }else{
+        mExifValues.mTimeStamp = false;
+    }
+}
+
+//latlonString is string formatted coordinate
+//coord is rat_t[3]
+//latlonString is string formatted coordinate
+//coord is rat_t[3]
+// Convert a decimal-degree coordinate string into the EXIF
+// degrees/minutes/seconds rational triple (seconds at 1/10000 precision).
+// The sign is discarded (fabs); hemisphere is carried by the *_REF tag.
+void QCameraHardwareInterface::parseGPSCoordinate(const char *latlonString, rat_t* coord)
+{
+    if(coord == NULL) {
+        ALOGE("%s: error, invalid argument coord == NULL", __func__);
+        return;
+    }
+    float degF = fabs(atof(latlonString));
+    float minF = (degF- (int) degF) * 60;
+    float secF = (minF - (int) minF) * 60;
+
+    coord[0] = getRational((int) degF, 1);
+    coord[1] = getRational((int) minF, 1);
+    coord[2] = getRational((int) (secF * 10000), 10000);
+}
+
+// True when the camcorder should run in low-power mode: either explicitly
+// requested (mPowerMode) or implied by a high-frame-rate level.
+bool QCameraHardwareInterface::isLowPowerCamcorder() {
+
+    if (mPowerMode == LOW_POWER)
+        return true;
+
+    if(mHFRLevel > 1) /* hard code the value now. Need to move tgtcommon to camear.h */
+        return true;
+
+    return false;
+}
+
+// Determine no-display (no preview surface) mode: the
+// persist.camera.nodisplay property overrides the "no-display-mode"
+// parameter when non-zero. Always returns NO_ERROR.
+status_t QCameraHardwareInterface::setNoDisplayMode(const QCameraParameters& params)
+{
+    char prop[PROPERTY_VALUE_MAX];
+    memset(prop, 0, sizeof(prop));
+    property_get("persist.camera.nodisplay", prop, "0");
+    int prop_val = atoi(prop);
+
+    if (prop_val == 0) {
+        const char *str_val  = params.get("no-display-mode");
+        if(str_val && strlen(str_val) > 0) {
+            mNoDisplayMode = atoi(str_val);
+        } else {
+            mNoDisplayMode = 0;
+        }
+        ALOGV("Param mNoDisplayMode =%d", mNoDisplayMode);
+    } else {
+        mNoDisplayMode = prop_val;
+        ALOGV("prop mNoDisplayMode =%d", mNoDisplayMode);
+    }
+    return NO_ERROR;
+}
+
+// Ask the backend to cancel/unlock the continuous-autofocus lock.
+status_t QCameraHardwareInterface::setCAFLockCancel(void)
+{
+    ALOGV("%s : E", __func__);
+    status_t rc = NO_ERROR;
+    int32_t value;
+
+    //for CAF unlock
+    value = 1;
+    rc = (native_set_parms(MM_CAMERA_PARM_CAF_LOCK_CANCEL, sizeof(int32_t), (void *)(&value))) ?
+        NO_ERROR : UNKNOWN_ERROR;
+    ALOGV("%s : X", __func__);
+    return rc;
+}
+
+// When `disable` is true, restrict the advertised picture sizes to the video
+// dimensions (snapshot-during-video limitation); otherwise restore the full
+// supported picture-size list.
+void QCameraHardwareInterface::prepareVideoPicture(bool disable){
+    String8 str;
+    char buffer[32];
+
+    if(disable) {
+        sprintf(buffer, "%dx%d", mDimension.video_width, mDimension.video_height);
+        str.append(buffer);
+
+        mParameters.setPictureSize(mDimension.video_width, mDimension.video_height);
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+                        str.string());
+        ALOGV("%s: Video Picture size supported = %d X %d",
+              __func__,mDimension.video_width,mDimension.video_height);
+    }else{
+        //Set Picture Size
+        mParameters.setPictureSize(mDimension.picture_width, mDimension.picture_height);
+        mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+                        mPictureSizeValues.string());
+    }
+}
+
+}; /*namespace android */
diff --git a/camera/QCameraHWI_Preview.cpp b/camera/QCameraHWI_Preview.cpp
new file mode 100644
index 0000000..302afac
--- /dev/null
+++ b/camera/QCameraHWI_Preview.cpp
@@ -0,0 +1,1499 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define LOG_TAG "QCameraHWI_Preview"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+#include <genlock.h>
+#include <gralloc_priv.h>
+
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraHWI_Preview class implementation goes here*/
+/* following code implement the preview mode's image capture & display logic of this class*/
+
+namespace android {
+
+// ---------------------------------------------------------------------------
+// Preview Callback
+// ---------------------------------------------------------------------------
+static void preview_notify_cb(mm_camera_ch_data_buf_t *frame,
+ void *user_data)
+{
+ QCameraStream_preview *pme = (QCameraStream_preview *)user_data;
+ mm_camera_ch_data_buf_t *bufs_used = 0;
+ ALOGV("%s: E", __func__);
+ /* for preview data, there is no queue, so directly use */
+ if(pme==NULL) {
+ ALOGE("%s: X : Incorrect cookie",__func__);
+ /*Call buf done*/
+ return;
+ }
+
+ pme->processPreviewFrame(frame);
+ ALOGV("%s: X", __func__);
+}
+
+status_t QCameraStream_preview::setPreviewWindow(preview_stream_ops_t* window)
+{
+ status_t retVal = NO_ERROR;
+ ALOGV(" %s: E ", __FUNCTION__);
+ if( window == NULL) {
+ ALOGW(" Setting NULL preview window ");
+ /* TODO: Current preview window will be invalidated.
+ * Release all the buffers back */
+ // relinquishBuffers();
+ }
+ Mutex::Autolock lock(mStopCallbackLock);
+ mPreviewWindow = window;
+ ALOGV(" %s : X ", __FUNCTION__ );
+ return retVal;
+}
+
+status_t QCameraStream_preview::getBufferFromSurface()
+{
+ int err = 0;
+ int numMinUndequeuedBufs = 0;
+ int format = 0;
+ status_t ret = NO_ERROR;
+ int gralloc_usage;
+
+ ALOGV(" %s : E ", __FUNCTION__);
+
+ if( mPreviewWindow == NULL) {
+ ALOGE("%s: mPreviewWindow = NULL", __func__);
+ return INVALID_OPERATION;
+ }
+ cam_ctrl_dimension_t dim;
+
+ //mDisplayLock.lock();
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+
+ format = mHalCamCtrl->getPreviewFormatInfo().Hal_format;
+ if(ret != NO_ERROR) {
+ ALOGE("%s: display format %d is not supported", __func__, dim.prev_format);
+ goto end;
+ }
+ numMinUndequeuedBufs = 0;
+ if(mPreviewWindow->get_min_undequeued_buffer_count) {
+ err = mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow, &numMinUndequeuedBufs);
+ if (err != 0) {
+ ALOGE("get_min_undequeued_buffer_count failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ }
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ mHalCamCtrl->mPreviewMemory.buffer_count = kPreviewBufferCount + numMinUndequeuedBufs;
+ if(mHalCamCtrl->isZSLMode()) {
+ if(mHalCamCtrl->getZSLQueueDepth() > numMinUndequeuedBufs)
+ mHalCamCtrl->mPreviewMemory.buffer_count +=
+ mHalCamCtrl->getZSLQueueDepth() - numMinUndequeuedBufs;
+ }
+ err = mPreviewWindow->set_buffer_count(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_count );
+ if (err != 0) {
+ ALOGE("set_buffer_count failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ err = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
+ dim.display_width, dim.display_height, format);
+ if (err != 0) {
+ ALOGE("set_buffers_geometry failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, &mVFEOutputs);
+ if(ret != MM_CAMERA_OK) {
+ ALOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE failed");
+ ret = BAD_VALUE;
+ goto end;
+ }
+
+ //as the software encoder is used to encode 720p, to enhance the performance
+ //cached pmem is used here
+ if(mVFEOutputs == 1 && dim.display_height == 720)
+ gralloc_usage = CAMERA_GRALLOC_HEAP_ID | CAMERA_GRALLOC_FALLBACK_HEAP_ID;
+ else
+ gralloc_usage = CAMERA_GRALLOC_HEAP_ID | CAMERA_GRALLOC_FALLBACK_HEAP_ID |
+ CAMERA_GRALLOC_CACHING_ID;
+ err = mPreviewWindow->set_usage(mPreviewWindow, gralloc_usage);
+ if(err != 0) {
+ /* set_usage error out */
+ ALOGE("%s: set_usage rc = %d", __func__, err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_HFR_FRAME_SKIP, &mHFRFrameSkip);
+ if(ret != MM_CAMERA_OK) {
+ ALOGE("get parm MM_CAMERA_PARM_HFR_FRAME_SKIP failed");
+ ret = BAD_VALUE;
+ goto end;
+ }
+ for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+ int stride;
+ err = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+ &mHalCamCtrl->mPreviewMemory.buffer_handle[cnt],
+ &mHalCamCtrl->mPreviewMemory.stride[cnt]);
+ if(!err) {
+ ALOGV("%s: dequeue buf hdl =%p", __func__, *mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+ err = mPreviewWindow->lock_buffer(this->mPreviewWindow,
+ mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+ // lock the buffer using genlock
+ ALOGV("%s: camera call genlock_lock, hdl=%p", __FUNCTION__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]));
+ if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]),
+ GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+ ALOGV("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //return -EINVAL;
+ } else {
+ ALOGV("%s: genlock_lock_buffer hdl =%p", __FUNCTION__, *mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_LOCKED;
+ }
+ } else {
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_NOT_OWNED;
+ ALOGV("%s: dequeue_buffer idx = %d err = %d", __func__, cnt, err);
+ }
+
+ ALOGV("%s: dequeue buf: %p\n", __func__, mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+
+ if(err != 0) {
+ ALOGE("%s: dequeue_buffer failed: %s (%d)", __func__,
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ for(int i = 0; i < cnt; i++) {
+ if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[i]) {
+ ALOGV("%s: camera call genlock_unlock", __FUNCTION__);
+ if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+ (*(mHalCamCtrl->mPreviewMemory.buffer_handle[i])))) {
+ ALOGE("%s: genlock_unlock_buffer failed: hdl =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[i])) );
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //return -EINVAL;
+ } else {
+ mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_UNLOCKED;
+ }
+ }
+ if( mHalCamCtrl->mPreviewMemory.local_flag[i] != BUFFER_NOT_OWNED) {
+ err = mPreviewWindow->cancel_buffer(mPreviewWindow,
+ mHalCamCtrl->mPreviewMemory.buffer_handle[i]);
+ }
+ mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_NOT_OWNED;
+ ALOGV("%s: cancel_buffer: hdl =%p", __func__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[i]));
+ mHalCamCtrl->mPreviewMemory.buffer_handle[i] = NULL;
+ }
+ memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+ goto end;
+ }
+
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt] =
+ (struct private_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+#ifdef USE_ION
+ mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt] = open("/dev/ion", O_RDONLY);
+ if (mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt] < 0) {
+ ALOGE("%s: failed: could not open ion device\n", __func__);
+ } else {
+ mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt].fd =
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd;
+ if (ioctl(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt],
+ ION_IOC_IMPORT, &mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt]) < 0)
+ ALOGE("ION import failed\n");
+ }
+#endif
+ mHalCamCtrl->mPreviewMemory.camera_memory[cnt] =
+ mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size, 1, (void *)this);
+ ALOGV("%s: idx = %d, fd = %d, size = %d, offset = %d", __func__,
+ cnt, mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->offset);
+ }
+
+
+ memset(&mHalCamCtrl->mMetadata, 0, sizeof(mHalCamCtrl->mMetadata));
+ memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+
+ ALOGV(" %s : X ",__FUNCTION__);
+end:
+ //mDisplayLock.unlock();
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+ return ret;
+}
+
+status_t QCameraStream_preview::putBufferToSurface() {
+ int err = 0;
+ status_t ret = NO_ERROR;
+
+ ALOGV(" %s : E ", __FUNCTION__);
+
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+ if (cnt < mHalCamCtrl->mPreviewMemory.buffer_count) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW, cnt, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ ALOGE("%s: unmapping Preview Buffer", __func__);
+ }
+ if(mHalCamCtrl->isZSLMode()) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, cnt, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ ALOGE("%s: unmapping Thumbnail Buffer for ZSL", __func__);
+ }
+ }
+ }
+
+ mHalCamCtrl->mPreviewMemory.camera_memory[cnt]->release(mHalCamCtrl->mPreviewMemory.camera_memory[cnt]);
+#ifdef USE_ION
+ struct ion_handle_data ion_handle;
+ ion_handle.handle = mHalCamCtrl->mPreviewMemory.ion_info_fd[cnt].handle;
+ if (ioctl(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt], ION_IOC_FREE, &ion_handle)
+ < 0)
+ ALOGE("%s: ion free failed\n", __func__);
+ close(mHalCamCtrl->mPreviewMemory.main_ion_fd[cnt]);
+#endif
+ if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[cnt]) {
+ ALOGV("%s: camera call genlock_unlock", __FUNCTION__);
+ if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+ (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])))) {
+ ALOGE("%s: genlock_unlock_buffer failed, handle =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])));
+ continue;
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //return -EINVAL;
+ } else {
+
+ ALOGV("%s: genlock_unlock_buffer, handle =%p", __FUNCTION__, (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])));
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+ }
+ }
+ if( mHalCamCtrl->mPreviewMemory.local_flag[cnt] != BUFFER_NOT_OWNED) {
+ err = mPreviewWindow->cancel_buffer(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+ ALOGV("%s: cancel_buffer: hdl =%p", __func__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]));
+ }
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_NOT_OWNED;
+
+ ALOGV(" put buffer %d successfully", cnt);
+ }
+
+ if (mDisplayBuf.preview.buf.mp != NULL) {
+ delete[] mDisplayBuf.preview.buf.mp;
+ mDisplayBuf.preview.buf.mp = NULL;
+ }
+
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+ ALOGV(" %s : X ",__FUNCTION__);
+ return NO_ERROR;
+}
+
+
+status_t QCameraStream_preview::getBufferNoDisplay( )
+{
+ int err = 0;
+ status_t ret = NO_ERROR;
+ int i, num_planes, frame_len, y_off, cbcr_off;
+ cam_ctrl_dimension_t dim;
+ uint32_t planes[VIDEO_MAX_PLANES];
+
+ ALOGV("%s : E ", __FUNCTION__);
+
+
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if(ret != NO_ERROR) {
+ ALOGE("%s: display format %d is not supported", __func__, dim.prev_format);
+ goto end;
+ }
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ mHalCamCtrl->mNoDispPreviewMemory.buffer_count = kPreviewBufferCount;
+ if(mHalCamCtrl->isZSLMode()) {
+ if(mHalCamCtrl->getZSLQueueDepth() > kPreviewBufferCount - 3)
+ mHalCamCtrl->mNoDispPreviewMemory.buffer_count =
+ mHalCamCtrl->getZSLQueueDepth() + 3;
+ }
+
+ num_planes = dim.display_frame_offset.num_planes;
+ for ( i = 0; i < num_planes; i++) {
+ planes[i] = dim.display_frame_offset.mp[i].len;
+ }
+
+ frame_len = dim.picture_frame_offset.frame_len;
+ y_off = dim.picture_frame_offset.mp[0].offset;
+ cbcr_off = dim.picture_frame_offset.mp[1].offset;
+ ALOGV("%s: main image: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+ __func__, dim.rotation, y_off, cbcr_off, frame_len,
+ dim.display_width, dim.display_height);
+ if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mNoDispPreviewMemory,
+ mHalCamCtrl->mNoDispPreviewMemory.buffer_count,
+ frame_len, y_off, cbcr_off, MSM_PMEM_MAINIMG,
+ NULL,NULL, num_planes, planes) < 0) {
+ ret = NO_MEMORY;
+ goto end;
+ };
+
+ memset(&mHalCamCtrl->mMetadata, 0, sizeof(mHalCamCtrl->mMetadata));
+ memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+
+ ALOGV(" %s : X ",__FUNCTION__);
+end:
+ //mDisplayLock.unlock();
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+ return NO_ERROR;
+}
+
+status_t QCameraStream_preview::freeBufferNoDisplay()
+{
+ int err = 0;
+ status_t ret = NO_ERROR;
+
+ ALOGV(" %s : E ", __FUNCTION__);
+
+ //mDisplayLock.lock();
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ for (int cnt = 0; cnt < mHalCamCtrl->mNoDispPreviewMemory.buffer_count; cnt++) {
+ if (cnt < mHalCamCtrl->mNoDispPreviewMemory.buffer_count) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+ cnt, mCameraId, CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ ALOGE("%s: sending data Msg Failed", __func__);
+ }
+ if(mHalCamCtrl->isZSLMode()) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, cnt, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ ALOGE("%s: Send socket msg to Unmap Failed", __func__);
+ }
+ }
+ }
+ }
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mNoDispPreviewMemory);
+ memset(&mHalCamCtrl->mNoDispPreviewMemory, 0, sizeof(mHalCamCtrl->mNoDispPreviewMemory));
+ if (mDisplayBuf.preview.buf.mp != NULL) {
+ delete[] mDisplayBuf.preview.buf.mp;
+ mDisplayBuf.preview.buf.mp = NULL;
+ }
+
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ ALOGV(" %s : X ",__FUNCTION__);
+ return NO_ERROR;
+}
+
+void QCameraStream_preview::notifyROIEvent(fd_roi_t roi)
+{
+ camera_memory_t *data = mHalCamCtrl->mGetMemory(-1, 1, 1, NULL);
+ switch (roi.type) {
+ case FD_ROI_TYPE_HEADER:
+ {
+ mDisplayLock.lock();
+ mNumFDRcvd = 0;
+ memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+ mHalCamCtrl->mMetadata.faces = mHalCamCtrl->mFace;
+ mHalCamCtrl->mMetadata.number_of_faces = roi.d.hdr.num_face_detected;
+ if(mHalCamCtrl->mMetadata.number_of_faces > MAX_ROI)
+ mHalCamCtrl->mMetadata.number_of_faces = MAX_ROI;
+ mDisplayLock.unlock();
+
+ if (mHalCamCtrl->mMetadata.number_of_faces == 0) {
+ // Clear previous faces
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+
+ if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+ ALOGV("%s: Face detection RIO callback", __func__);
+ pcb(CAMERA_MSG_PREVIEW_METADATA, data, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ }
+ break;
+ case FD_ROI_TYPE_DATA:
+ {
+ #if 1
+ mDisplayLock.lock();
+ int idx = roi.d.data.idx;
+ if (idx >= mHalCamCtrl->mMetadata.number_of_faces) {
+ mDisplayLock.unlock();
+ ALOGE("%s: idx %d out of boundary %d", __func__, idx, mHalCamCtrl->mMetadata.number_of_faces);
+ break;
+ }
+
+ mHalCamCtrl->mFace[idx].id = roi.d.data.face.id;
+ mHalCamCtrl->mFace[idx].score = roi.d.data.face.score;
+
+ // top
+ mHalCamCtrl->mFace[idx].rect[0] =
+ roi.d.data.face.face_boundary.x*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ //right
+ mHalCamCtrl->mFace[idx].rect[1] =
+ roi.d.data.face.face_boundary.y*2000/mHalCamCtrl->mDimension.display_height - 1000;
+ //bottom
+ mHalCamCtrl->mFace[idx].rect[2] = mHalCamCtrl->mFace[idx].rect[0] +
+ roi.d.data.face.face_boundary.dx*2000/mHalCamCtrl->mDimension.display_width;
+ //left
+ mHalCamCtrl->mFace[idx].rect[3] = mHalCamCtrl->mFace[idx].rect[1] +
+ roi.d.data.face.face_boundary.dy*2000/mHalCamCtrl->mDimension.display_height;
+
+ // Center of left eye
+ mHalCamCtrl->mFace[idx].left_eye[0] =
+ roi.d.data.face.left_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].left_eye[1] =
+ roi.d.data.face.left_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+ // Center of right eye
+ mHalCamCtrl->mFace[idx].right_eye[0] =
+ roi.d.data.face.right_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].right_eye[1] =
+ roi.d.data.face.right_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+#if 0
+ // Center of mouth
+ mHalCamCtrl->mFace[idx].mouth[0] =
+ roi.d.data.face.mouth_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].mouth[1] =
+ roi.d.data.face.mouth_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+ mHalCamCtrl->mFace[idx].smile_degree = roi.d.data.face.smile_degree;
+ mHalCamCtrl->mFace[idx].smile_score = roi.d.data.face.smile_confidence;
+ mHalCamCtrl->mFace[idx].blink_detected = roi.d.data.face.blink_detected;
+ mHalCamCtrl->mFace[idx].face_recognised = roi.d.data.face.is_face_recognised;
+ mHalCamCtrl->mFace[idx].gaze_angle = roi.d.data.face.gaze_angle;
+
+ /* newly added */
+ // upscale by 2 to recover from demaen downscaling
+ mHalCamCtrl->mFace[idx].updown_dir = roi.d.data.face.updown_dir*2;
+ mHalCamCtrl->mFace[idx].leftright_dir = roi.d.data.face.leftright_dir*2;
+ mHalCamCtrl->mFace[idx].roll_dir = roi.d.data.face.roll_dir*2;
+
+ mHalCamCtrl->mFace[idx].leye_blink = roi.d.data.face.left_blink;
+ mHalCamCtrl->mFace[idx].reye_blink = roi.d.data.face.right_blink;
+ mHalCamCtrl->mFace[idx].left_right_gaze = roi.d.data.face.left_right_gaze;
+ mHalCamCtrl->mFace[idx].top_bottom_gaze = roi.d.data.face.top_bottom_gaze;
+ ALOGE("%s: Face(%d, %d, %d, %d), leftEye(%d, %d), rightEye(%d, %d), mouth(%d, %d), smile(%d, %d), face_recg(%d)", __func__,
+ mHalCamCtrl->mFace[idx].rect[0], mHalCamCtrl->mFace[idx].rect[1],
+ mHalCamCtrl->mFace[idx].rect[2], mHalCamCtrl->mFace[idx].rect[3],
+ mHalCamCtrl->mFace[idx].left_eye[0], mHalCamCtrl->mFace[idx].left_eye[1],
+ mHalCamCtrl->mFace[idx].right_eye[0], mHalCamCtrl->mFace[idx].right_eye[1],
+ mHalCamCtrl->mFace[idx].mouth[0], mHalCamCtrl->mFace[idx].mouth[1],
+ mHalCamCtrl->mFace[idx].smile_degree, mHalCamCtrl->mFace[idx].smile_score,
+ mHalCamCtrl->mFace[idx].face_recognised);
+ ALOGE("%s: gaze(%d, %d, %d), updown(%d), leftright(%d), roll(%d), blink(%d, %d, %d)", __func__,
+ mHalCamCtrl->mFace[idx].gaze_angle, mHalCamCtrl->mFace[idx].left_right_gaze,
+ mHalCamCtrl->mFace[idx].top_bottom_gaze, mHalCamCtrl->mFace[idx].updown_dir,
+ mHalCamCtrl->mFace[idx].leftright_dir, mHalCamCtrl->mFace[idx].roll_dir,
+ mHalCamCtrl->mFace[idx].blink_detected,
+ mHalCamCtrl->mFace[idx].leye_blink, mHalCamCtrl->mFace[idx].reye_blink);
+#endif
+ mNumFDRcvd++;
+ mDisplayLock.unlock();
+
+ if (mNumFDRcvd == mHalCamCtrl->mMetadata.number_of_faces) {
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+
+ if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+ ALOGV("%s: Face detection RIO callback with %d faces detected (score=%d)", __func__, mNumFDRcvd, mHalCamCtrl->mFace[idx].score);
+ pcb(CAMERA_MSG_PREVIEW_METADATA, data, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ #endif
+ }
+ break;
+ }
+ if(NULL != data) data->release(data);
+}
+
+status_t QCameraStream_preview::initDisplayBuffers()
+{
+ status_t ret = NO_ERROR;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ uint32_t frame_len = 0; /* frame planner length */
+ int buffer_num = 4, i; /* number of buffers for display */
+ const char *pmem_region;
+ uint8_t num_planes = 0;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ void *vaddr = NULL;
+ cam_ctrl_dimension_t dim;
+
+ ALOGV("%s:BEGIN",__func__);
+ memset(&mHalCamCtrl->mMetadata, 0, sizeof(camera_frame_metadata_t));
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ memset(&mNotifyBuffer, 0, sizeof(mNotifyBuffer));
+
+/* get preview size by querying mm_camera */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+
+ memset(&(this->mDisplayStreamBuf),0, sizeof(this->mDisplayStreamBuf));
+
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: error - can't get camera dimension!", __func__);
+ ALOGV("%s: X", __func__);
+ return BAD_VALUE;
+ }else {
+ width = dim.display_width,
+ height = dim.display_height;
+ }
+
+ ret = getBufferFromSurface();
+ if(ret != NO_ERROR) {
+ ALOGE("%s: cannot get memory from surface texture client, ret = %d", __func__, ret);
+ return ret;
+ }
+
+ /* set 4 buffers for display */
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ memset(&mDisplayStreamBuf, 0, sizeof(mDisplayStreamBuf));
+ this->mDisplayStreamBuf.num = mHalCamCtrl->mPreviewMemory.buffer_count;
+ this->myMode=myMode; /*Need to assign this in constructor after translating from mask*/
+ num_planes = dim.display_frame_offset.num_planes;
+ for(i =0; i< num_planes; i++) {
+ planes[i] = dim.display_frame_offset.mp[i].len;
+ }
+ this->mDisplayStreamBuf.frame_len = dim.display_frame_offset.frame_len;
+
+ memset(&mDisplayBuf, 0, sizeof(mDisplayBuf));
+ mDisplayBuf.preview.buf.mp = new mm_camera_mp_buf_t[mDisplayStreamBuf.num];
+ if (!mDisplayBuf.preview.buf.mp) {
+ ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+ ret = NO_MEMORY;
+ goto error;
+ }
+ memset(mDisplayBuf.preview.buf.mp, 0,
+ mDisplayStreamBuf.num * sizeof(mm_camera_mp_buf_t));
+
+ /*allocate memory for the buffers*/
+ for(int i = 0; i < mDisplayStreamBuf.num; i++){
+ if (mHalCamCtrl->mPreviewMemory.private_buffer_handle[i] == NULL)
+ continue;
+ mDisplayStreamBuf.frame[i].fd = mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->fd;
+ mDisplayStreamBuf.frame[i].cbcr_off = planes[0];
+ mDisplayStreamBuf.frame[i].y_off = 0;
+ mDisplayStreamBuf.frame[i].path = OUTPUT_TYPE_P;
+ mHalCamCtrl->mPreviewMemory.addr_offset[i] =
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->offset;
+ mDisplayStreamBuf.frame[i].buffer =
+ (long unsigned int)mHalCamCtrl->mPreviewMemory.camera_memory[i]->data;
+ mDisplayStreamBuf.frame[i].ion_alloc.len = mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size;
+ mDisplayStreamBuf.frame[i].ion_dev_fd = mHalCamCtrl->mPreviewMemory.main_ion_fd[i];
+ mDisplayStreamBuf.frame[i].fd_data = mHalCamCtrl->mPreviewMemory.ion_info_fd[i];
+
+ ALOGV("%s: idx = %d, fd = %d, size = %d, cbcr_offset = %d, y_offset = %d, "
+ "offset = %d, vaddr = 0x%x", __func__, i, mDisplayStreamBuf.frame[i].fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+ mDisplayStreamBuf.frame[i].cbcr_off, mDisplayStreamBuf.frame[i].y_off,
+ mHalCamCtrl->mPreviewMemory.addr_offset[i],
+ (uint32_t)mDisplayStreamBuf.frame[i].buffer);
+
+ ret = mHalCamCtrl->sendMappingBuf(
+ MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+ i,
+ mDisplayStreamBuf.frame[i].fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+ mCameraId, CAM_SOCK_MSG_TYPE_FD_MAPPING);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: sending mapping data Msg Failed", __func__);
+ goto error;
+ }
+
+ if(mHalCamCtrl->isZSLMode()) {
+ ret = mHalCamCtrl->sendMappingBuf(
+ MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL,
+ i,
+ mDisplayStreamBuf.frame[i].fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+ mCameraId, CAM_SOCK_MSG_TYPE_FD_MAPPING);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: Send socket msg to map Failed", __func__);
+ goto error;
+ }
+ }
+ mDisplayBuf.preview.buf.mp[i].frame = mDisplayStreamBuf.frame[i];
+ mDisplayBuf.preview.buf.mp[i].frame_offset = mHalCamCtrl->mPreviewMemory.addr_offset[i];
+ mDisplayBuf.preview.buf.mp[i].num_planes = num_planes;
+
+ /* Plane 0 needs to be set separately. Set other planes
+ * in a loop. */
+ mDisplayBuf.preview.buf.mp[i].planes[0].length = planes[0];
+ mDisplayBuf.preview.buf.mp[i].planes[0].m.userptr = mDisplayStreamBuf.frame[i].fd;
+ mDisplayBuf.preview.buf.mp[i].planes[0].data_offset = 0;
+ mDisplayBuf.preview.buf.mp[i].planes[0].reserved[0] =
+ mDisplayBuf.preview.buf.mp[i].frame_offset;
+ for (int j = 1; j < num_planes; j++) {
+ mDisplayBuf.preview.buf.mp[i].planes[j].length = planes[j];
+ mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr =
+ mDisplayStreamBuf.frame[i].fd;
+ mDisplayBuf.preview.buf.mp[i].planes[j].data_offset = 0;
+ mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0] =
+ mDisplayBuf.preview.buf.mp[i].planes[j-1].reserved[0] +
+ mDisplayBuf.preview.buf.mp[i].planes[j-1].length;
+ }
+
+ for (int j = 0; j < num_planes; j++)
+ ALOGV("Planes: %d length: %d userptr: %lu offset: %d\n", j,
+ mDisplayBuf.preview.buf.mp[i].planes[j].length,
+ mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr,
+ mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0]);
+ }/*end of for loop*/
+
+ /* register the streaming buffers for the channel*/
+ mDisplayBuf.ch_type = MM_CAMERA_CH_PREVIEW;
+ mDisplayBuf.preview.num = mDisplayStreamBuf.num;
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ ALOGV("%s:END",__func__);
+ return NO_ERROR;
+
+error:
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ putBufferToSurface();
+
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+status_t QCameraStream_preview::initPreviewOnlyBuffers()
+{
+ status_t ret = NO_ERROR;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ uint32_t frame_len = 0; /* frame planner length */
+ int buffer_num = 4; /* number of buffers for display */
+ const char *pmem_region;
+ uint8_t num_planes = 0;
+ uint32_t planes[VIDEO_MAX_PLANES];
+
+ cam_ctrl_dimension_t dim;
+
+ ALOGV("%s:BEGIN",__func__);
+ memset(&mHalCamCtrl->mMetadata, 0, sizeof(camera_frame_metadata_t));
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ memset(&mHalCamCtrl->mNoDispPreviewMemory, 0, sizeof(mHalCamCtrl->mNoDispPreviewMemory));
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ memset(&mNotifyBuffer, 0, sizeof(mNotifyBuffer));
+
+/* get preview size by querying mm_camera */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: error - can't get camera dimension!", __func__);
+ ALOGV("%s: X", __func__);
+ return BAD_VALUE;
+ }else {
+ width = dim.display_width;
+ height = dim.display_height;
+ }
+
+ ret = getBufferNoDisplay( );
+ if(ret != NO_ERROR) {
+ ALOGE("%s: cannot get memory from surface texture client, ret = %d", __func__, ret);
+ return ret;
+ }
+
+ /* set 4 buffers for display */
+ memset(&mDisplayStreamBuf, 0, sizeof(mDisplayStreamBuf));
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ this->mDisplayStreamBuf.num = mHalCamCtrl->mNoDispPreviewMemory.buffer_count;
+ this->myMode=myMode; /*Need to assign this in constructor after translating from mask*/
+ num_planes = dim.display_frame_offset.num_planes;
+ for (int i = 0; i < num_planes; i++) {
+ planes[i] = dim.display_frame_offset.mp[i].len;
+ }
+ this->mDisplayStreamBuf.frame_len = dim.display_frame_offset.frame_len;
+
+ memset(&mDisplayBuf, 0, sizeof(mDisplayBuf));
+ mDisplayBuf.preview.buf.mp = new mm_camera_mp_buf_t[mDisplayStreamBuf.num];
+ if (!mDisplayBuf.preview.buf.mp) {
+ ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+ }
+ memset(mDisplayBuf.preview.buf.mp, 0,
+ mDisplayStreamBuf.num * sizeof(mm_camera_mp_buf_t));
+
+ /*allocate memory for the buffers*/
+ void *vaddr = NULL;
+ for(int i = 0; i < mDisplayStreamBuf.num; i++){
+ if (mHalCamCtrl->mNoDispPreviewMemory.camera_memory[i] == NULL)
+ continue;
+ mDisplayStreamBuf.frame[i].fd = mHalCamCtrl->mNoDispPreviewMemory.fd[i];
+ mDisplayStreamBuf.frame[i].cbcr_off = planes[0];
+ mDisplayStreamBuf.frame[i].y_off = 0;
+ mDisplayStreamBuf.frame[i].path = OUTPUT_TYPE_P;
+ mDisplayStreamBuf.frame[i].buffer =
+ (long unsigned int)mHalCamCtrl->mNoDispPreviewMemory.camera_memory[i]->data;
+ mDisplayStreamBuf.frame[i].ion_dev_fd = mHalCamCtrl->mNoDispPreviewMemory.main_ion_fd[i];
+ mDisplayStreamBuf.frame[i].fd_data = mHalCamCtrl->mNoDispPreviewMemory.ion_info_fd[i];
+
+ ALOGV("%s: idx = %d, fd = %d, size = %d, cbcr_offset = %d, y_offset = %d, "
+ "vaddr = 0x%x", __func__, i, mDisplayStreamBuf.frame[i].fd,
+ frame_len,
+ mDisplayStreamBuf.frame[i].cbcr_off, mDisplayStreamBuf.frame[i].y_off,
+ (uint32_t)mDisplayStreamBuf.frame[i].buffer);
+
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(
+ MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW,
+ i,
+ mDisplayStreamBuf.frame[i].fd,
+ mHalCamCtrl->mNoDispPreviewMemory.size,
+ mCameraId, CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ ALOGE("%s: sending mapping data Msg Failed", __func__);
+ }
+
+ if(mHalCamCtrl->isZSLMode()) {
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(
+ MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL,
+ i,
+ mDisplayStreamBuf.frame[i].fd,
+ mHalCamCtrl->mNoDispPreviewMemory.size,
+ mCameraId, CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ ALOGE("%s: sending mapping data Msg Failed", __func__);
+ }
+ }
+ mDisplayBuf.preview.buf.mp[i].frame = mDisplayStreamBuf.frame[i];
+ mDisplayBuf.preview.buf.mp[i].frame_offset = mDisplayStreamBuf.frame[i].y_off;
+ mDisplayBuf.preview.buf.mp[i].num_planes = num_planes;
+
+ /* Plane 0 needs to be set separately. Set other planes
+ * in a loop. */
+ mDisplayBuf.preview.buf.mp[i].planes[0].length = planes[0];
+ mDisplayBuf.preview.buf.mp[i].planes[0].m.userptr = mDisplayStreamBuf.frame[i].fd;
+ mDisplayBuf.preview.buf.mp[i].planes[0].data_offset = 0;
+ mDisplayBuf.preview.buf.mp[i].planes[0].reserved[0] =
+ mDisplayBuf.preview.buf.mp[i].frame_offset;
+ for (int j = 1; j < num_planes; j++) {
+ mDisplayBuf.preview.buf.mp[i].planes[j].length = planes[j];
+ mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr =
+ mDisplayStreamBuf.frame[i].fd;
+ mDisplayBuf.preview.buf.mp[i].planes[j].data_offset = 0;
+ mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0] =
+ mDisplayBuf.preview.buf.mp[i].planes[j-1].reserved[0] +
+ mDisplayBuf.preview.buf.mp[i].planes[j-1].length;
+ }
+
+ for (int j = 0; j < num_planes; j++)
+ ALOGV("Planes: %d length: %d userptr: %lu offset: %d\n", j,
+ mDisplayBuf.preview.buf.mp[i].planes[j].length,
+ mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr,
+ mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0]);
+ }/*end of for loop*/
+
+ /* register the streaming buffers for the channel*/
+ mDisplayBuf.ch_type = MM_CAMERA_CH_PREVIEW;
+ mDisplayBuf.preview.num = mDisplayStreamBuf.num;
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ ALOGV("%s:END",__func__);
+ return NO_ERROR;
+
+end:
+ if (MM_CAMERA_OK == ret ) {
+ ALOGV("%s: X - NO_ERROR ", __func__);
+ return NO_ERROR;
+ }
+
+ ALOGV("%s: out of memory clean up", __func__);
+ /* release the allocated memory */
+
+ ALOGV("%s: X - BAD_VALUE ", __func__);
+ return BAD_VALUE;
+}
+
+
+void QCameraStream_preview::dumpFrameToFile(struct msm_frame* newFrame)
+{
+#if 0
+ int32_t enabled = 0;
+ int frm_num;
+ uint32_t skip_mode;
+ char value[PROPERTY_VALUE_MAX];
+ char buf[32];
+ int w, h;
+ static int count = 0;
+ cam_ctrl_dimension_t dim;
+ int file_fd;
+ int rc = 0;
+ int len;
+ unsigned long addr;
+ unsigned long * tmp = (unsigned long *)newFrame->buffer;
+ addr = *tmp;
+ status_t ret = cam_config_get_parm(mHalCamCtrl->mCameraId,
+ MM_CAMERA_PARM_DIMENSION, &dim);
+
+ w = dim.display_width;
+ h = dim.display_height;
+ len = (w * h)*3/2;
+ count++;
+ if(count < 100) {
+ snprintf(buf, sizeof(buf), "/data/mzhu%d.yuv", count);
+ file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+
+ rc = write(file_fd, (const void *)addr, len);
+ ALOGV("%s: file='%s', vaddr_old=0x%x, addr_map = 0x%p, len = %d, rc = %d",
+ __func__, buf, (uint32_t)newFrame->buffer, (void *)addr, len, rc);
+ close(file_fd);
+ ALOGV("%s: dump %s, rc = %d, len = %d", __func__, buf, rc, len);
+ }
+#endif
+}
+
+status_t QCameraStream_preview::processPreviewFrameWithDisplay(
+ mm_camera_ch_data_buf_t *frame)
+{
+ ALOGV("%s",__func__);
+ int err = 0;
+ int msgType = 0;
+ int i;
+ camera_memory_t *data = NULL;
+ camera_frame_metadata_t *metadata = NULL;
+
+ Mutex::Autolock lock(mStopCallbackLock);
+ if(!mActive) {
+ ALOGV("Preview Stopped. Returning callback");
+ return NO_ERROR;
+ }
+
+ if(mHalCamCtrl==NULL) {
+ ALOGE("%s: X: HAL control object not set",__func__);
+ /*Call buf done*/
+ return BAD_VALUE;
+ }
+
+ if(mHalCamCtrl->mPauseFramedispatch) {
+ if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, frame)) {
+ ALOGE("BUF DONE FAILED for the recylce buffer");
+ }
+ return NO_ERROR;
+ }
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_timestamp_callback rcb = mHalCamCtrl->mDataCbTimestamp;
+ void *rdata = mHalCamCtrl->mCallbackCookie;
+ mHalCamCtrl->mCallbackLock.unlock();
+ nsecs_t timeStamp = seconds_to_nanoseconds(frame->def.frame->ts.tv_sec) ;
+ timeStamp += frame->def.frame->ts.tv_nsec;
+
+ if(mFirstFrameRcvd == false) {
+ mm_camera_util_profile("HAL: First preview frame received");
+ mFirstFrameRcvd = true;
+ }
+
+ if (UNLIKELY(mHalCamCtrl->mDebugFps)) {
+ mHalCamCtrl->debugShowPreviewFPS();
+ }
+ //dumpFrameToFile(frame->def.frame);
+ mHalCamCtrl->dumpFrameToFile(frame->def.frame, HAL_DUMP_FRM_PREVIEW);
+
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ mNotifyBuffer[frame->def.idx] = *frame;
+
+ ALOGV("Enqueue buf handle %p\n",
+ mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ ALOGV("%s: camera call genlock_unlock", __FUNCTION__);
+
+ if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx]) {
+ ALOGV("%s: genlock_unlock_buffer hdl =%p", __FUNCTION__, (*mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]));
+ if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t*)
+ (*mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]))) {
+ ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //return -EINVAL;
+ } else {
+ mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx] = BUFFER_UNLOCKED;
+ }
+ } else {
+ ALOGE("%s: buffer to be enqueued is not locked", __FUNCTION__);
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //return -EINVAL;
+ }
+
+#ifdef USE_ION
+ struct ion_flush_data cache_inv_data;
+ int ion_fd;
+ ion_fd = frame->def.frame->ion_dev_fd;
+ cache_inv_data.vaddr = (void *)frame->def.frame->buffer;
+ cache_inv_data.fd = frame->def.frame->fd;
+ cache_inv_data.handle = frame->def.frame->fd_data.handle;
+ cache_inv_data.length = frame->def.frame->ion_alloc.len;
+
+ if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data,
+ ION_IOC_CLEAN_INV_CACHES) < 0)
+ ALOGE("%s: Cache clean for Preview buffer %p fd = %d failed", __func__,
+ cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
+ if(mHFRFrameSkip == 1)
+ {
+ const char *str = mHalCamCtrl->mParameters.get(
+ QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+ if(str != NULL){
+ int is_hfr_off = 0;
+ mHFRFrameCnt++;
+ if(!strcmp(str, QCameraParameters::VIDEO_HFR_OFF)) {
+ is_hfr_off = 1;
+ err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_2X)) {
+ mHFRFrameCnt %= 2;
+ } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_3X)) {
+ mHFRFrameCnt %= 3;
+ } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_4X)) {
+ mHFRFrameCnt %= 4;
+ }
+ if(mHFRFrameCnt == 0)
+ err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ else if(!is_hfr_off)
+ err = this->mPreviewWindow->cancel_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ } else
+ err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ } else {
+ err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+ (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ }
+ if(err != 0) {
+ ALOGE("%s: enqueue_buffer failed, err = %d", __func__, err);
+ } else {
+ ALOGV("%s: enqueue_buffer hdl=%p", __func__, *mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+ mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx] = BUFFER_NOT_OWNED;
+ }
+ buffer_handle_t *buffer_handle = NULL;
+ int tmp_stride = 0;
+ err = this->mPreviewWindow->dequeue_buffer(this->mPreviewWindow,
+ &buffer_handle, &tmp_stride);
+ if (err == NO_ERROR && buffer_handle != NULL) {
+
+ ALOGV("%s: dequed buf hdl =%p", __func__, *buffer_handle);
+ for(i = 0; i < mHalCamCtrl->mPreviewMemory.buffer_count; i++) {
+ if(mHalCamCtrl->mPreviewMemory.buffer_handle[i] == buffer_handle) {
+ mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_UNLOCKED;
+ break;
+ }
+ }
+ if (i < mHalCamCtrl->mPreviewMemory.buffer_count ) {
+ err = this->mPreviewWindow->lock_buffer(this->mPreviewWindow, buffer_handle);
+ ALOGV("%s: camera call genlock_lock: hdl =%p", __FUNCTION__, *buffer_handle);
+ if (GENLOCK_FAILURE == genlock_lock_buffer((native_handle_t*)(*buffer_handle), GENLOCK_WRITE_LOCK,
+ GENLOCK_MAX_TIMEOUT)) {
+ ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+ //mHalCamCtrl->mPreviewMemoryLock.unlock();
+ // return -EINVAL;
+ } else {
+ mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_LOCKED;
+
+ if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mNotifyBuffer[i])) {
+ ALOGE("BUF DONE FAILED");
+ }
+ }
+ }
+ } else
+ ALOGV("%s: error in dequeue_buffer, enqueue_buffer idx = %d, no free buffer now", __func__, frame->def.idx);
+
+ /* Save the last displayed frame. We'll be using it to fill the gap between
+ when preview stops and postview start during snapshot.*/
+ mLastQueuedFrame = &(mDisplayStreamBuf.frame[frame->def.idx]);
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+ ALOGV("Message enabled = 0x%x", mHalCamCtrl->mMsgEnabled);
+
+ camera_memory_t *previewMem = NULL;
+
+ if (pcb != NULL) {
+ ALOGV("%s: mMsgEnabled =0x%x, preview format =%d", __func__,
+ mHalCamCtrl->mMsgEnabled, mHalCamCtrl->mPreviewFormat);
+ //Sending preview callback if corresponding Msgs are enabled
+ if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+ ALOGV("%s: PCB callback enabled", __func__);
+ msgType |= CAMERA_MSG_PREVIEW_FRAME;
+ int previewBufSize;
+ /* The preview buffer size sent back in the callback should be (width*height*bytes_per_pixel)
+ * As all preview formats we support, use 12 bits per pixel, buffer size = previewWidth * previewHeight * 3/2.
+ * We need to put a check if some other formats are supported in future. (punits) */
+ if ((mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV21) || (mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV12) ||
+ (mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_YV12))
+ {
+ if (mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_YV12) {
+ previewBufSize = ((mHalCamCtrl->mPreviewWidth+15)/16) *16* mHalCamCtrl->mPreviewHeight +
+ ((mHalCamCtrl->mPreviewWidth/2+15)/16)*16* mHalCamCtrl->mPreviewHeight;
+ } else {
+ previewBufSize = mHalCamCtrl->mPreviewWidth * mHalCamCtrl->mPreviewHeight * 3/2;
+ }
+ if(previewBufSize != mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->size) {
+ previewMem = mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->fd,
+ previewBufSize, 1, mHalCamCtrl->mCallbackCookie);
+ if (!previewMem || !previewMem->data) {
+ ALOGE("%s: mGetMemory failed.\n", __func__);
+ } else {
+ data = previewMem;
+ }
+ } else
+ data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];
+ } else {
+ data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];
+ ALOGE("Invalid preview format, buffer size in preview callback may be wrong.");
+ }
+ } else {
+ data = NULL;
+ }
+ if(msgType) {
+ mStopCallbackLock.unlock();
+ if(mActive)
+ pcb(msgType, data, 0, metadata, mHalCamCtrl->mCallbackCookie);
+ if (previewMem)
+ previewMem->release(previewMem);
+ mStopCallbackLock.lock();
+ }
+ ALOGV("end of cb");
+ } else {
+ ALOGV("%s PCB is not enabled", __func__);
+ }
+ if(rcb != NULL && mVFEOutputs == 1)
+ {
+ int flagwait = 1;
+ if(mHalCamCtrl->mStartRecording == true &&
+ ( mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME))
+ {
+ if (mHalCamCtrl->mStoreMetaDataInFrame)
+ {
+ if(mHalCamCtrl->mRecordingMemory.metadata_memory[frame->def.idx])
+ {
+ flagwait = 1;
+ mStopCallbackLock.unlock();
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+ mHalCamCtrl->mRecordingMemory.metadata_memory[frame->def.idx],
+ 0, mHalCamCtrl->mCallbackCookie);
+ mStopCallbackLock.lock();
+ }else
+ flagwait = 0;
+ }
+ else
+ {
+ mStopCallbackLock.unlock();
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+ mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx],
+ 0, mHalCamCtrl->mCallbackCookie);
+ mStopCallbackLock.lock();
+ }
+
+ if(flagwait){
+ Mutex::Autolock rLock(&mHalCamCtrl->mRecordFrameLock);
+ if (mHalCamCtrl->mReleasedRecordingFrame != true) {
+ mHalCamCtrl->mRecordWait.wait(mHalCamCtrl->mRecordFrameLock);
+ }
+ mHalCamCtrl->mReleasedRecordingFrame = false;
+ }
+ }
+ }
+ /* Save the last displayed frame. We'll be using it to fill the gap between
+ when preview stops and postview start during snapshot.*/
+ //mLastQueuedFrame = frame->def.frame;
+/*
+ if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, frame))
+ {
+ ALOGE("BUF DONE FAILED");
+ return BAD_VALUE;
+ }
+*/
+ return NO_ERROR;
+}
+
+
+status_t QCameraStream_preview::processPreviewFrameWithOutDisplay(
+ mm_camera_ch_data_buf_t *frame)
+{
+ ALOGV("%s",__func__);
+ int err = 0;
+ int msgType = 0;
+ int i;
+ camera_memory_t *data = NULL;
+ camera_frame_metadata_t *metadata = NULL;
+
+ Mutex::Autolock lock(mStopCallbackLock);
+ if(!mActive) {
+ ALOGV("Preview Stopped. Returning callback");
+ return NO_ERROR;
+ }
+ if(mHalCamCtrl==NULL) {
+ ALOGE("%s: X: HAL control object not set",__func__);
+ /*Call buf done*/
+ return BAD_VALUE;
+ }
+
+ if (UNLIKELY(mHalCamCtrl->mDebugFps)) {
+ mHalCamCtrl->debugShowPreviewFPS();
+ }
+ //dumpFrameToFile(frame->def.frame);
+ mHalCamCtrl->dumpFrameToFile(frame->def.frame, HAL_DUMP_FRM_PREVIEW);
+
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ mNotifyBuffer[frame->def.idx] = *frame;
+
+ /* Save the last displayed frame. We'll be using it to fill the gap between
+ when preview stops and postview start during snapshot.*/
+ mLastQueuedFrame = &(mDisplayStreamBuf.frame[frame->def.idx]);
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+ ALOGV("Message enabled = 0x%x", mHalCamCtrl->mMsgEnabled);
+
+ camera_memory_t *previewMem = NULL;
+ int previewWidth, previewHeight;
+ mHalCamCtrl->mParameters.getPreviewSize(&previewWidth, &previewHeight);
+
+#ifdef USE_ION
+ struct ion_flush_data cache_inv_data;
+ int ion_fd;
+ ion_fd = frame->def.frame->ion_dev_fd;
+ cache_inv_data.vaddr = (void *)frame->def.frame->buffer;
+ cache_inv_data.fd = frame->def.frame->fd;
+ cache_inv_data.handle = frame->def.frame->fd_data.handle;
+ cache_inv_data.length = frame->def.frame->ion_alloc.len;
+
+ if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data,
+ ION_IOC_CLEAN_INV_CACHES) < 0)
+ ALOGE("%s: Cache clean for Preview buffer %p fd = %d failed", __func__,
+ cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
+ if (pcb != NULL) {
+ //Sending preview callback if corresponding Msgs are enabled
+ if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+ msgType |= CAMERA_MSG_PREVIEW_FRAME;
+ int previewBufSize;
+ /* For CTS : Forcing preview memory buffer length to be
+ 'previewWidth * previewHeight * 3/2'.
+ Needed when gralloc allocated extra memory.*/
+ //Can add this check for other formats as well.
+ if( mHalCamCtrl->mPreviewFormat == CAMERA_YUV_420_NV21) {
+ previewBufSize = previewWidth * previewHeight * 3/2;
+ if(previewBufSize != mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->size) {
+ previewMem = mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[frame->def.idx]->fd,
+ previewBufSize, 1, mHalCamCtrl->mCallbackCookie);
+ if (!previewMem || !previewMem->data) {
+ ALOGE("%s: mGetMemory failed.\n", __func__);
+ } else {
+ data = previewMem;
+ }
+ } else
+ data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];//mPreviewHeap->mBuffers[frame->def.idx];
+ } else
+ data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];//mPreviewHeap->mBuffers[frame->def.idx];
+ } else {
+ data = NULL;
+ }
+
+ if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA){
+ msgType |= CAMERA_MSG_PREVIEW_METADATA;
+ metadata = &mHalCamCtrl->mMetadata;
+ } else {
+ metadata = NULL;
+ }
+ if(msgType) {
+ mStopCallbackLock.unlock();
+ if(mActive)
+ pcb(msgType, data, 0, metadata, mHalCamCtrl->mCallbackCookie);
+ if (previewMem)
+ previewMem->release(previewMem);
+ mStopCallbackLock.lock();
+ }
+
+ if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mNotifyBuffer[frame->def.idx])) {
+ ALOGE("BUF DONE FAILED");
+ }
+
+ ALOGV("end of cb");
+ }
+
+ return NO_ERROR;
+}
+
+status_t QCameraStream_preview::processPreviewFrame (
+ mm_camera_ch_data_buf_t *frame)
+{
+ if (mHalCamCtrl->isNoDisplayMode()) {
+ return processPreviewFrameWithOutDisplay(frame);
+ } else {
+ return processPreviewFrameWithDisplay(frame);
+ }
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+QCameraStream_preview::
+QCameraStream_preview(int cameraId, camera_mode_t mode)
+ : QCameraStream(cameraId,mode),
+ mLastQueuedFrame(NULL),
+ mNumFDRcvd(0),
+ mFirstFrameRcvd(false)
+ {
+ mHalCamCtrl = NULL;
+ ALOGV("%s: E", __func__);
+ ALOGV("%s: X", __func__);
+ }
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+QCameraStream_preview::~QCameraStream_preview() {
+ ALOGV("%s: E", __func__);
+ if(mActive) {
+ stop();
+ }
+ if(mInit) {
+ release();
+ }
+ mInit = false;
+ mActive = false;
+ ALOGV("%s: X", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+status_t QCameraStream_preview::init() {
+
+ status_t ret = NO_ERROR;
+ ALOGV("%s: E", __func__);
+
+ ret = QCameraStream::initChannel (mCameraId, MM_CAMERA_CH_PREVIEW_MASK);
+ if (NO_ERROR!=ret) {
+ ALOGE("%s E: can't init native cammera preview ch\n",__func__);
+ return ret;
+ }
+
+ /* register a notify into the mm_camera_t object*/
+ (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+ preview_notify_cb,
+ MM_CAMERA_REG_BUF_CB_INFINITE,
+ 0,this);
+ ALOGV("Debug : %s : cam_evt_register_buf_notify",__func__);
+ buffer_handle_t *buffer_handle = NULL;
+ int tmp_stride = 0;
+ mInit = true;
+ return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+status_t QCameraStream_preview::start()
+{
+ ALOGV("%s: E", __func__);
+ status_t ret = NO_ERROR;
+ cam_format_t previewFmt;
+ Mutex::Autolock lock(mStopCallbackLock);
+
+ /* call start() in parent class to start the monitor thread*/
+ //QCameraStream::start ();
+ previewFmt = mHalCamCtrl->getPreviewFormat();
+ setFormat(MM_CAMERA_CH_PREVIEW_MASK, previewFmt);
+
+ if (mHalCamCtrl->isNoDisplayMode()) {
+ if(NO_ERROR!=initPreviewOnlyBuffers()){
+ return BAD_VALUE;
+ }
+ } else {
+ if(NO_ERROR!=initDisplayBuffers()){
+ return BAD_VALUE;
+ }
+ }
+ ALOGV("Debug : %s : initDisplayBuffers",__func__);
+
+ ret = cam_config_prepare_buf(mCameraId, &mDisplayBuf);
+ ALOGV("Debug : %s : cam_config_prepare_buf",__func__);
+ if(ret != MM_CAMERA_OK) {
+ ALOGV("%s:reg preview buf err=%d\n", __func__, ret);
+ ret = BAD_VALUE;
+ goto error;
+ }else {
+ ret = NO_ERROR;
+ }
+
+ /* For preview, the OP_MODE we set is dependent upon whether we are
+ starting camera or camcorder. For snapshot, anyway we disable preview.
+ However, for ZSL we need to set OP_MODE to OP_MODE_ZSL and not
+ OP_MODE_VIDEO. We'll set that for now in CamCtrl. So in case of
+ ZSL we skip setting Mode here */
+
+ if (!(myMode & CAMERA_ZSL_MODE)) {
+ ALOGV("Setting OP MODE to MM_CAMERA_OP_MODE_VIDEO");
+ mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_VIDEO;
+ ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+ &op_mode);
+ ALOGV("OP Mode Set");
+
+ if(MM_CAMERA_OK != ret) {
+ ALOGE("%s: X :set mode MM_CAMERA_OP_MODE_VIDEO err=%d\n", __func__, ret);
+ ret = BAD_VALUE;
+ goto error;
+ }
+ }else {
+ ALOGV("Setting OP MODE to MM_CAMERA_OP_MODE_ZSL");
+ mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_ZSL;
+ ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+ &op_mode);
+ if(MM_CAMERA_OK != ret) {
+ ALOGE("%s: X :set mode MM_CAMERA_OP_MODE_ZSL err=%d\n", __func__, ret);
+ ret = BAD_VALUE;
+ goto error;
+ }
+ }
+
+ /* call mm_camera action start(...) */
+ ALOGV("Starting Preview/Video Stream. ");
+ mFirstFrameRcvd = false;
+ ret = cam_ops_action(mCameraId, true, MM_CAMERA_OPS_PREVIEW, 0);
+
+ if (MM_CAMERA_OK != ret) {
+ ALOGE ("%s: preview streaming start err=%d\n", __func__, ret);
+ ret = BAD_VALUE;
+ goto error;
+ }
+
+ ALOGV("Debug : %s : Preview streaming Started",__func__);
+ ret = NO_ERROR;
+
+ mActive = true;
+ goto end;
+
+error:
+ putBufferToSurface();
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+ }
+
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+ void QCameraStream_preview::stop() {
+ ALOGV("%s: E", __func__);
+ int ret=MM_CAMERA_OK;
+
+ if(!mActive) {
+ return;
+ }
+ Mutex::Autolock lock(mStopCallbackLock);
+ mActive = false;
+ /* unregister the notify fn from the mm_camera_t object*/
+
+ ALOGV("%s: Stop the thread \n", __func__);
+ /* call stop() in parent class to stop the monitor thread*/
+ ret = cam_ops_action(mCameraId, false, MM_CAMERA_OPS_PREVIEW, 0);
+ if(MM_CAMERA_OK != ret) {
+ ALOGE ("%s: camera preview stop err=%d\n", __func__, ret);
+ }
+ ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_PREVIEW);
+ if(ret != MM_CAMERA_OK) {
+ ALOGE("%s:Unreg preview buf err=%d\n", __func__, ret);
+ //ret = BAD_VALUE;
+ }
+
+ /* In case of a clean stop, we need to clean all buffers*/
+ ALOGV("Debug : %s : Buffer Unprepared",__func__);
+ /*free camera_memory handles and return buffer back to surface*/
+ if (! mHalCamCtrl->isNoDisplayMode() ) {
+ putBufferToSurface();
+ } else {
+ freeBufferNoDisplay( );
+ }
+
+ ALOGV("%s: X", __func__);
+
+ }
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+ void QCameraStream_preview::release() {
+
+ ALOGV("%s : BEGIN",__func__);
+ int ret=MM_CAMERA_OK,i;
+
+ if(!mInit)
+ {
+ ALOGE("%s : Stream not Initalized",__func__);
+ return;
+ }
+
+ if(mActive) {
+ this->stop();
+ }
+
+ ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_PREVIEW);
+ ALOGV("Debug : %s : De init Channel",__func__);
+ if(ret != MM_CAMERA_OK) {
+ ALOGE("%s:Deinit preview channel failed=%d\n", __func__, ret);
+ //ret = BAD_VALUE;
+ }
+
+ (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+ NULL,
+ (mm_camera_register_buf_cb_type_t)NULL,
+ NULL,
+ NULL);
+ mInit = false;
+ ALOGV("%s: END", __func__);
+
+ }
+
+QCameraStream*
+QCameraStream_preview::createInstance(int cameraId,
+ camera_mode_t mode)
+{
+ QCameraStream* pme = new QCameraStream_preview(cameraId, mode);
+ return pme;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+void QCameraStream_preview::deleteInstance(QCameraStream *p)
+{
+ if (p){
+ ALOGV("%s: BEGIN", __func__);
+ p->release();
+ delete p;
+ p = NULL;
+ ALOGV("%s: END", __func__);
+ }
+}
+
+
+/* Temp helper function */
+void *QCameraStream_preview::getLastQueuedFrame(void)
+{
+ return mLastQueuedFrame;
+}
+
+// ---------------------------------------------------------------------------
+// No code beyond this line
+// ---------------------------------------------------------------------------
+}; // namespace android
diff --git a/camera/QCameraHWI_Preview_7x27A.cpp b/camera/QCameraHWI_Preview_7x27A.cpp
new file mode 100644
index 0000000..56ec895
--- /dev/null
+++ b/camera/QCameraHWI_Preview_7x27A.cpp
@@ -0,0 +1,900 @@
+/*
+** Copyright (c) 2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Preview"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+#include <gralloc_priv.h>
+#include <genlock.h>
+
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraHWI_Preview class implementation goes here*/
+/* following code implement the preview mode's image capture & display logic of this class*/
+
+namespace android {
+
+// ---------------------------------------------------------------------------
+// Preview Callback
+// ---------------------------------------------------------------------------
+static void preview_notify_cb(mm_camera_ch_data_buf_t *frame,
+ void *user_data)
+{
+ QCameraStream_preview *pme = (QCameraStream_preview *)user_data;
+ mm_camera_ch_data_buf_t *bufs_used = 0;
+ ALOGV("%s: E", __func__);
+ /* for preview data, there is no queue, so directly use*/
+ if(pme==NULL) {
+ ALOGE("%s: X : Incorrect cookie",__func__);
+ /*Call buf done*/
+ return;
+ }
+
+ pme->processPreviewFrame(frame);
+ ALOGV("%s: X", __func__);
+}
+
+status_t QCameraStream_preview::setPreviewWindow(preview_stream_ops_t* window)
+{
+ status_t retVal = NO_ERROR;
+ ALOGE(" %s: E ", __FUNCTION__);
+ if( window == NULL) {
+ ALOGW(" Setting NULL preview window ");
+ /* TODO: Current preview window will be invalidated.
+ * Release all the buffers back */
+ // relinquishBuffers();
+ }
+ mDisplayLock.lock();
+ mPreviewWindow = window;
+ mDisplayLock.unlock();
+ ALOGV(" %s : X ", __FUNCTION__ );
+ return retVal;
+}
+
+status_t QCameraStream_preview::getBufferFromSurface() {
+ int err = 0;
+ int numMinUndequeuedBufs = 0;
+ int format = 0;
+ status_t ret = NO_ERROR;
+
+ ALOGI(" %s : E ", __FUNCTION__);
+
+ if( mPreviewWindow == NULL) {
+ ALOGE("%s: mPreviewWindow = NULL", __func__);
+ return INVALID_OPERATION;
+ }
+ cam_ctrl_dimension_t dim;
+
+ //mDisplayLock.lock();
+ cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+
+ format = mHalCamCtrl->getPreviewFormatInfo().Hal_format;
+ if(ret != NO_ERROR) {
+ ALOGE("%s: display format %d is not supported", __func__, dim.prev_format);
+ goto end;
+ }
+ numMinUndequeuedBufs = 0;
+ if(mPreviewWindow->get_min_undequeued_buffer_count) {
+ err = mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow, &numMinUndequeuedBufs);
+ if (err != 0) {
+ ALOGE("get_min_undequeued_buffer_count failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ }
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ mHalCamCtrl->mPreviewMemory.buffer_count = kPreviewBufferCount + numMinUndequeuedBufs;
+ err = mPreviewWindow->set_buffer_count(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_count );
+ if (err != 0) {
+ ALOGE("set_buffer_count failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ err = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
+ dim.display_width, dim.display_height, format);
+ if (err != 0) {
+ ALOGE("set_buffers_geometry failed: %s (%d)",
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ err = mPreviewWindow->set_usage(mPreviewWindow,
+ GRALLOC_USAGE_PRIVATE_ADSP_HEAP |
+ GRALLOC_USAGE_PRIVATE_UNCACHED);
+ if(err != 0) {
+ /* set_usage error out */
+ ALOGE("%s: set_usage rc = %d", __func__, err);
+ ret = UNKNOWN_ERROR;
+ goto end;
+ }
+ for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+ int stride;
+ err = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+ &mHalCamCtrl->mPreviewMemory.buffer_handle[cnt],
+ &mHalCamCtrl->mPreviewMemory.stride[cnt]);
+ if(!err) {
+ err = mPreviewWindow->lock_buffer(this->mPreviewWindow,
+ mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+
+ // lock the buffer using genlock
+ ALOGD("%s: camera call genlock_lock", __FUNCTION__);
+ if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]),
+ GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+ ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ return -EINVAL;
+ }
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_LOCKED;
+ } else
+ ALOGE("%s: dequeue_buffer idx = %d err = %d", __func__, cnt, err);
+
+ ALOGE("%s: dequeue buf: %u\n", __func__, (unsigned int)mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+
+ if(err != 0) {
+ ALOGE("%s: dequeue_buffer failed: %s (%d)", __func__,
+ strerror(-err), -err);
+ ret = UNKNOWN_ERROR;
+ for(int i = 0; i < cnt; i++) {
+ ALOGD("%s: camera call genlock_unlock", __FUNCTION__);
+ if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[i]) {
+ if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+ (*(mHalCamCtrl->mPreviewMemory.buffer_handle[i])))) {
+ ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ return -EINVAL;
+ }
+ }
+ err = mPreviewWindow->cancel_buffer(mPreviewWindow,
+ mHalCamCtrl->mPreviewMemory.buffer_handle[i]);
+ mHalCamCtrl->mPreviewMemory.buffer_handle[i] = NULL;
+ mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_UNLOCKED;
+ }
+ goto end;
+ }
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt] =
+ (struct private_handle_t *)(*mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+ mHalCamCtrl->mPreviewMemory.camera_memory[cnt] =
+ mHalCamCtrl->mGetMemory(mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size, 1, (void *)this);
+ ALOGE("%s: idx = %d, fd = %d, size = %d, offset = %d", __func__,
+ cnt, mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size,
+ mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->offset);
+ }
+
+
+ memset(&mHalCamCtrl->mMetadata, 0, sizeof(mHalCamCtrl->mMetadata));
+ memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+
+ ALOGI(" %s : X ",__FUNCTION__);
+end:
+ //mDisplayLock.unlock();
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+ return NO_ERROR;
+}
+
+status_t QCameraStream_preview::putBufferToSurface() {
+ int err = 0;
+ status_t ret = NO_ERROR;
+
+ ALOGI(" %s : E ", __FUNCTION__);
+
+ //mDisplayLock.lock();
+ mHalCamCtrl->mPreviewMemoryLock.lock();
+ for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+ mHalCamCtrl->mPreviewMemory.camera_memory[cnt]->release(mHalCamCtrl->mPreviewMemory.camera_memory[cnt]);
+ if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[cnt]) {
+ ALOGD("%s: camera call genlock_unlock", __FUNCTION__);
+ if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+ (*(mHalCamCtrl->mPreviewMemory.buffer_handle[cnt])))) {
+ ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ return -EINVAL;
+ } else {
+ mHalCamCtrl->mPreviewMemory.local_flag[cnt] = BUFFER_UNLOCKED;
+ }
+ }
+ err = mPreviewWindow->cancel_buffer(mPreviewWindow, mHalCamCtrl->mPreviewMemory.buffer_handle[cnt]);
+ ALOGE(" put buffer %d successfully", cnt);
+ }
+ memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+ mHalCamCtrl->mPreviewMemoryLock.unlock();
+ //mDisplayLock.unlock();
+ ALOGI(" %s : X ",__FUNCTION__);
+ return NO_ERROR;
+}
+
+void QCameraStream_preview::notifyROIEvent(fd_roi_t roi)
+{
+ switch (roi.type) {
+ case FD_ROI_TYPE_HEADER:
+ {
+ mDisplayLock.lock();
+ mNumFDRcvd = 0;
+ memset(mHalCamCtrl->mFace, 0, sizeof(mHalCamCtrl->mFace));
+ mHalCamCtrl->mMetadata.faces = mHalCamCtrl->mFace;
+ mHalCamCtrl->mMetadata.number_of_faces = roi.d.hdr.num_face_detected;
+ if(mHalCamCtrl->mMetadata.number_of_faces > MAX_ROI)
+ mHalCamCtrl->mMetadata.number_of_faces = MAX_ROI;
+ mDisplayLock.unlock();
+
+ if (mHalCamCtrl->mMetadata.number_of_faces == 0) {
+ // Clear previous faces
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+
+ if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+ ALOGE("%s: Face detection RIO callback", __func__);
+ pcb(CAMERA_MSG_PREVIEW_METADATA, NULL, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ }
+ break;
+ case FD_ROI_TYPE_DATA:
+ {
+ mDisplayLock.lock();
+ int idx = roi.d.data.idx;
+ if (idx >= mHalCamCtrl->mMetadata.number_of_faces) {
+ mDisplayLock.unlock();
+ ALOGE("%s: idx %d out of boundary %d", __func__, idx, mHalCamCtrl->mMetadata.number_of_faces);
+ break;
+ }
+
+ mHalCamCtrl->mFace[idx].id = roi.d.data.face.id;
+ mHalCamCtrl->mFace[idx].score = roi.d.data.face.score / 10; // keep within range 0~100
+
+ // top
+ mHalCamCtrl->mFace[idx].rect[0] =
+ roi.d.data.face.face_boundary.x*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ //right
+ mHalCamCtrl->mFace[idx].rect[1] =
+ roi.d.data.face.face_boundary.y*2000/mHalCamCtrl->mDimension.display_height - 1000;
+ //bottom
+ mHalCamCtrl->mFace[idx].rect[2] = mHalCamCtrl->mFace[idx].rect[0] +
+ roi.d.data.face.face_boundary.dx*2000/mHalCamCtrl->mDimension.display_width;
+ //left
+ mHalCamCtrl->mFace[idx].rect[3] = mHalCamCtrl->mFace[idx].rect[1] +
+ roi.d.data.face.face_boundary.dy*2000/mHalCamCtrl->mDimension.display_height;
+
+ // Center of left eye
+ mHalCamCtrl->mFace[idx].left_eye[0] =
+ roi.d.data.face.left_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].left_eye[1] =
+ roi.d.data.face.left_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+ // Center of right eye
+ mHalCamCtrl->mFace[idx].right_eye[0] =
+ roi.d.data.face.right_eye_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].right_eye[1] =
+ roi.d.data.face.right_eye_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+ // Center of mouth
+ mHalCamCtrl->mFace[idx].mouth[0] =
+ roi.d.data.face.mouth_center[0]*2000/mHalCamCtrl->mDimension.display_width - 1000;
+ mHalCamCtrl->mFace[idx].mouth[1] =
+ roi.d.data.face.mouth_center[1]*2000/mHalCamCtrl->mDimension.display_height - 1000;
+
+ mHalCamCtrl->mFace[idx].smile_degree = roi.d.data.face.smile_degree;
+ mHalCamCtrl->mFace[idx].smile_score = roi.d.data.face.smile_confidence;
+ mHalCamCtrl->mFace[idx].blink_detected = roi.d.data.face.blink_detected;
+ mHalCamCtrl->mFace[idx].face_recognised = roi.d.data.face.is_face_recognised;
+ mHalCamCtrl->mFace[idx].gaze_angle = roi.d.data.face.gaze_angle;
+
+ /* newly added */
+ // upscale by 2 to recover from demaen downscaling
+ mHalCamCtrl->mFace[idx].updown_dir = roi.d.data.face.updown_dir*2;
+ mHalCamCtrl->mFace[idx].leftright_dir = roi.d.data.face.leftright_dir*2;
+ mHalCamCtrl->mFace[idx].roll_dir = roi.d.data.face.roll_dir*2;
+
+ mHalCamCtrl->mFace[idx].leye_blink = roi.d.data.face.left_blink;
+ mHalCamCtrl->mFace[idx].reye_blink = roi.d.data.face.right_blink;
+ mHalCamCtrl->mFace[idx].left_right_gaze = roi.d.data.face.left_right_gaze;
+ mHalCamCtrl->mFace[idx].top_bottom_gaze = roi.d.data.face.top_bottom_gaze;
+
+ ALOGE("%s: Face(%d, %d, %d, %d), leftEye(%d, %d), rightEye(%d, %d), mouth(%d, %d), smile(%d, %d), blinked(%d)", __func__,
+ mHalCamCtrl->mFace[idx].rect[0], mHalCamCtrl->mFace[idx].rect[1],
+ mHalCamCtrl->mFace[idx].rect[2], mHalCamCtrl->mFace[idx].rect[3],
+ mHalCamCtrl->mFace[idx].left_eye[0], mHalCamCtrl->mFace[idx].left_eye[1],
+ mHalCamCtrl->mFace[idx].right_eye[0], mHalCamCtrl->mFace[idx].right_eye[1],
+ mHalCamCtrl->mFace[idx].mouth[0], mHalCamCtrl->mFace[idx].mouth[1],
+ roi.d.data.face.smile_degree, roi.d.data.face.smile_confidence, roi.d.data.face.blink_detected);
+
+ mNumFDRcvd++;
+ mDisplayLock.unlock();
+
+ if (mNumFDRcvd == mHalCamCtrl->mMetadata.number_of_faces) {
+ mHalCamCtrl->mCallbackLock.lock();
+ camera_data_callback pcb = mHalCamCtrl->mDataCb;
+ mHalCamCtrl->mCallbackLock.unlock();
+
+ if (pcb && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_METADATA)){
+ ALOGE("%s: Face detection RIO callback with %d faces detected (score=%d)", __func__, mNumFDRcvd, mHalCamCtrl->mFace[idx].score);
+ pcb(CAMERA_MSG_PREVIEW_METADATA, NULL, 0, &mHalCamCtrl->mMetadata, mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ }
+ break;
+ }
+}
+
+/* Allocate and register the preview display buffers.
+ * Queries the current preview dimension from mm-camera, obtains gralloc
+ * buffers from the preview surface (getBufferFromSurface), and fills
+ * mDisplayStreamBuf / mDisplayBuf with the per-plane layout the preview
+ * channel expects.
+ * Returns NO_ERROR on success; BAD_VALUE, NO_MEMORY, or the surface
+ * error code on failure. */
+status_t QCameraStream_preview::initDisplayBuffers()
+{
+  status_t ret = NO_ERROR;
+  uint8_t num_planes = 0;
+  uint32_t planes[VIDEO_MAX_PLANES];
+
+  cam_ctrl_dimension_t dim;
+
+  ALOGE("%s:BEGIN",__func__);
+  /* Reset face-detection metadata and all preview buffer bookkeeping. */
+  memset(&mHalCamCtrl->mMetadata, 0, sizeof(camera_frame_metadata_t));
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  memset(&mHalCamCtrl->mPreviewMemory, 0, sizeof(mHalCamCtrl->mPreviewMemory));
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+  memset(&mNotifyBuffer, 0, sizeof(mNotifyBuffer));
+
+  /* get preview size by querying mm_camera */
+  memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+
+  memset(&(this->mDisplayStreamBuf),0, sizeof(this->mDisplayStreamBuf));
+
+  ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+  if (MM_CAMERA_OK != ret) {
+    ALOGE("%s: error - can't get camera dimension!", __func__);
+    ALOGE("%s: X", __func__);
+    return BAD_VALUE;
+  }
+  /* NOTE: removed dead locals (width/height/frame_len/buffer_num/
+   * pmem_region/vaddr) — they were assigned but never used. */
+
+  ret = getBufferFromSurface();
+  if(ret != NO_ERROR) {
+    ALOGE("%s: cannot get memory from surface texture client, ret = %d", __func__, ret);
+    return ret;
+  }
+
+  /* set 4 buffers for display */
+  memset(&mDisplayStreamBuf, 0, sizeof(mDisplayStreamBuf));
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  this->mDisplayStreamBuf.num = mHalCamCtrl->mPreviewMemory.buffer_count;
+  this->myMode=myMode; /*Need to assign this in constructor after translating from mask*/
+  num_planes = 2;
+  planes[0] = dim.display_frame_offset.mp[0].len;
+  planes[1] = dim.display_frame_offset.mp[1].len;
+  this->mDisplayStreamBuf.frame_len = dim.display_frame_offset.frame_len;
+
+  mDisplayBuf.preview.buf.mp = new mm_camera_mp_buf_t[mDisplayStreamBuf.num];
+  if (!mDisplayBuf.preview.buf.mp) {
+    /* BUGFIX: previously only logged and fell through to
+     * memset(NULL, ...), which would dereference a null pointer.
+     * Fail the init instead (lock is held here, so release it). */
+    ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+    mHalCamCtrl->mPreviewMemoryLock.unlock();
+    return NO_MEMORY;
+  }
+  memset(mDisplayBuf.preview.buf.mp, 0,
+    mDisplayStreamBuf.num * sizeof(mm_camera_mp_buf_t));
+
+  /* Describe each gralloc buffer to mm-camera in multi-plane form. */
+  for(int i = 0; i < mDisplayStreamBuf.num; i++){
+    if (mHalCamCtrl->mPreviewMemory.private_buffer_handle[i] == NULL)
+      continue;
+    mDisplayStreamBuf.frame[i].fd = mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->fd;
+    mDisplayStreamBuf.frame[i].cbcr_off = planes[0];
+    mDisplayStreamBuf.frame[i].y_off = 0;
+    mDisplayStreamBuf.frame[i].path = OUTPUT_TYPE_P;
+    mHalCamCtrl->mPreviewMemory.addr_offset[i] =
+      mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->offset;
+    mDisplayStreamBuf.frame[i].buffer =
+      (long unsigned int)mHalCamCtrl->mPreviewMemory.camera_memory[i]->data;
+
+    ALOGE("%s: idx = %d, fd = %d, size = %d, cbcr_offset = %d, y_offset = %d, offset = %d, vaddr = 0x%x",
+      __func__, i,
+      mDisplayStreamBuf.frame[i].fd,
+      mHalCamCtrl->mPreviewMemory.private_buffer_handle[i]->size,
+      mDisplayStreamBuf.frame[i].cbcr_off,
+      mDisplayStreamBuf.frame[i].y_off,
+      mHalCamCtrl->mPreviewMemory.addr_offset[i],
+      (uint32_t)mDisplayStreamBuf.frame[i].buffer);
+
+    mDisplayBuf.preview.buf.mp[i].frame = mDisplayStreamBuf.frame[i];
+    mDisplayBuf.preview.buf.mp[i].frame_offset = mHalCamCtrl->mPreviewMemory.addr_offset[i];
+    mDisplayBuf.preview.buf.mp[i].num_planes = num_planes;
+
+    /* Plane 0 needs to be set separately; subsequent planes are laid out
+     * contiguously: each offset = previous offset + previous length. */
+    mDisplayBuf.preview.buf.mp[i].planes[0].length = planes[0];
+    mDisplayBuf.preview.buf.mp[i].planes[0].m.userptr = mDisplayStreamBuf.frame[i].fd;
+    mDisplayBuf.preview.buf.mp[i].planes[0].data_offset = 0;
+    mDisplayBuf.preview.buf.mp[i].planes[0].reserved[0] =
+      mDisplayBuf.preview.buf.mp[i].frame_offset;
+    for (int j = 1; j < num_planes; j++) {
+      mDisplayBuf.preview.buf.mp[i].planes[j].length = planes[j];
+      mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr =
+        mDisplayStreamBuf.frame[i].fd;
+      mDisplayBuf.preview.buf.mp[i].planes[j].data_offset = 0;
+      mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0] =
+        mDisplayBuf.preview.buf.mp[i].planes[j-1].reserved[0] +
+        mDisplayBuf.preview.buf.mp[i].planes[j-1].length;
+    }
+
+    for (int j = 0; j < num_planes; j++) {
+      ALOGE("Planes: %d length: %d userptr: %lu offset: %d\n",
+        j, mDisplayBuf.preview.buf.mp[i].planes[j].length,
+        mDisplayBuf.preview.buf.mp[i].planes[j].m.userptr,
+        mDisplayBuf.preview.buf.mp[i].planes[j].reserved[0]);
+    }
+  }/*end of for loop*/
+
+  /* register the streaming buffers for the channel*/
+  mDisplayBuf.ch_type = MM_CAMERA_CH_PREVIEW;
+  mDisplayBuf.preview.num = mDisplayStreamBuf.num;
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+  ALOGE("%s:END",__func__);
+  return NO_ERROR;
+  /* NOTE: the old "end:" label and its cleanup were unreachable dead code
+   * (no goto targeted it); removed. */
+}
+
+/* Debug helper: dump the first 100 preview frames to /data/mzhuNN.yuv.
+ * Assumes the buffer is YUV420 (w*h*3/2 bytes) at the current display
+ * size — TODO confirm against the configured preview format. */
+void QCameraStream_preview::dumpFrameToFile(struct msm_frame* newFrame)
+{
+  char buf[32];
+  int w, h;
+  static int count = 0;  /* persists across calls: caps total dumps */
+  cam_ctrl_dimension_t dim;
+  int file_fd;
+  int rc = 0;
+  int len;
+  unsigned long addr;
+  unsigned long * tmp = (unsigned long *)newFrame->buffer;
+  addr = *tmp;
+  status_t ret = cam_config_get_parm(mHalCamCtrl->mCameraId,
+                 MM_CAMERA_PARM_DIMENSION, &dim);
+  if (MM_CAMERA_OK != ret) {
+    /* BUGFIX: dim was read uninitialized when the query failed. */
+    ALOGE("%s: error - can't get camera dimension!", __func__);
+    return;
+  }
+
+  w = dim.display_width;
+  h = dim.display_height;
+  len = (w * h)*3/2;
+  count++;
+  if(count < 100) {
+    snprintf(buf, sizeof(buf), "/data/mzhu%d.yuv", count);
+    file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+    if (file_fd < 0) {
+      /* BUGFIX: write()/close() were previously issued on a failed open. */
+      ALOGE("%s: cannot open file %s", __func__, buf);
+      return;
+    }
+    rc = write(file_fd, (const void *)addr, len);
+    ALOGE("%s: file='%s', vaddr_old=0x%x, addr_map = 0x%p, len = %d, rc = %d",
+      __func__, buf, (uint32_t)newFrame->buffer, (void *)addr, len, rc);
+    close(file_fd);
+    ALOGE("%s: dump %s, rc = %d, len = %d", __func__, buf, rc, len);
+  }
+  /* NOTE: removed dead locals (enabled, frm_num, skip_mode, value). */
+}
+
+/* Handle one preview frame from mm-camera: return the displayed buffer to
+ * the preview window, dequeue/genlock the next buffer and hand it back to
+ * the driver, then deliver preview-frame and (optionally) video-frame
+ * callbacks to the client.
+ * NOTE(review): the lock/unlock ordering here (mStopCallbackLock held via
+ * Autolock but manually unlocked before invoking client callbacks) is
+ * deliberate and order-sensitive — do not restructure casually. */
+status_t QCameraStream_preview::processPreviewFrame(mm_camera_ch_data_buf_t *frame)
+{
+  ALOGV("%s",__func__);
+  int err = 0;
+  int msgType = 0;
+  camera_memory_t *data = NULL;
+  camera_frame_metadata_t *metadata = NULL;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mActive) {
+    /* Stream already stopped: drop the frame silently. */
+    ALOGE("Preview Stopped. Returning callback");
+    return NO_ERROR;
+  }
+  if(mHalCamCtrl==NULL) {
+    ALOGE("%s: X: HAL control object not set",__func__);
+    /*Call buf done*/
+    return BAD_VALUE;
+  }
+
+  /* Snapshot the timestamped-data callback under the callback lock. */
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_timestamp_callback rcb = mHalCamCtrl->mDataCbTimestamp;
+  void *rdata = mHalCamCtrl->mCallbackCookie;
+  mHalCamCtrl->mCallbackLock.unlock();
+
+  if (UNLIKELY(mHalCamCtrl->mDebugFps)) {
+    mHalCamCtrl->debugShowPreviewFPS();
+  }
+  //dumpFrameToFile(frame->def.frame);
+  mHalCamCtrl->dumpFrameToFile(frame->def.frame, HAL_DUMP_FRM_PREVIEW);
+
+  nsecs_t timeStamp = systemTime();
+
+  mHalCamCtrl->mPreviewMemoryLock.lock();
+  /* Remember the driver buffer so it can be re-queued after the window
+   * hands it back via dequeue_buffer below. */
+  mNotifyBuffer[frame->def.idx] = *frame;
+  // mzhu fix me, need to check meta data also.
+
+  ALOGI("Enqueue buf handle %p\n",
+  mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+  ALOGD("%s: camera call genlock_unlock", __FUNCTION__);
+  /* The buffer must be genlock-unlocked before it can be enqueued to the
+   * preview window; refuse to enqueue a buffer we don't hold locked. */
+  if (BUFFER_LOCKED == mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx]) {
+    if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t*)
+        (*mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]))) {
+      ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+      mHalCamCtrl->mPreviewMemoryLock.unlock();
+      return -EINVAL;
+    } else {
+      mHalCamCtrl->mPreviewMemory.local_flag[frame->def.idx] = BUFFER_UNLOCKED;
+    }
+  } else {
+    ALOGE("%s: buffer to be enqueued is not locked", __FUNCTION__);
+    mHalCamCtrl->mPreviewMemoryLock.unlock();
+    return -EINVAL;
+  }
+  err = this->mPreviewWindow->enqueue_buffer(this->mPreviewWindow,
+    (buffer_handle_t *)mHalCamCtrl->mPreviewMemory.buffer_handle[frame->def.idx]);
+  if(err != 0) {
+    ALOGE("%s: enqueue_buffer failed, err = %d", __func__, err);
+  }
+  /* Pull the next free buffer back from the window, genlock it for
+   * writing, and return the matching driver buffer via cam_evt_buf_done. */
+  buffer_handle_t *buffer_handle = NULL;
+  int tmp_stride = 0;
+  err = this->mPreviewWindow->dequeue_buffer(this->mPreviewWindow,
+    &buffer_handle, &tmp_stride);
+  if (err == NO_ERROR && buffer_handle != NULL) {
+    err = this->mPreviewWindow->lock_buffer(this->mPreviewWindow, buffer_handle);
+    ALOGD("%s: camera call genlock_lock", __FUNCTION__);
+    if (GENLOCK_FAILURE == genlock_lock_buffer((native_handle_t*)(*buffer_handle), GENLOCK_WRITE_LOCK,
+                                               GENLOCK_MAX_TIMEOUT)) {
+      ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+      mHalCamCtrl->mPreviewMemoryLock.unlock();
+      return -EINVAL;
+    }
+    /* Map the dequeued window handle back to its buffer index. */
+    for(int i = 0; i < mHalCamCtrl->mPreviewMemory.buffer_count; i++) {
+      ALOGD("h1: %p h2: %p\n", mHalCamCtrl->mPreviewMemory.buffer_handle[i], buffer_handle);
+      if(mHalCamCtrl->mPreviewMemory.buffer_handle[i] == buffer_handle) {
+        mm_camera_ch_data_buf_t tmp_frame;
+        mHalCamCtrl->mPreviewMemory.local_flag[i] = BUFFER_LOCKED;
+        if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mNotifyBuffer[i])) {
+          ALOGD("BUF DONE FAILED");
+          mHalCamCtrl->mPreviewMemoryLock.unlock();
+          return BAD_VALUE;
+        }
+        break;
+      }
+    }
+  } else
+    ALOGE("%s: error in dequeue_buffer, enqueue_buffer idx = %d, no free buffer now", __func__, frame->def.idx);
+  /* Save the last displayed frame. We'll be using it to fill the gap between
+     when preview stops and postview start during snapshot.*/
+  mLastQueuedFrame = &(mDisplayStreamBuf.frame[frame->def.idx]);
+  mHalCamCtrl->mPreviewMemoryLock.unlock();
+
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_callback pcb = mHalCamCtrl->mDataCb;
+  mHalCamCtrl->mCallbackLock.unlock();
+  ALOGD("Message enabled = 0x%x", mHalCamCtrl->mMsgEnabled);
+
+  if (pcb != NULL) {
+    //Sending preview callback if corresponding Msgs are enabled
+    if(mHalCamCtrl->mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+      msgType |=  CAMERA_MSG_PREVIEW_FRAME;
+      data = mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx];//mPreviewHeap->mBuffers[frame->def.idx];
+    } else {
+      data = NULL;
+    }
+    if(msgType) {
+      /* Release mStopCallbackLock before calling out so stop() can run
+       * while the client processes the frame. */
+      mStopCallbackLock.unlock();
+      pcb(msgType, data, 0, metadata, mHalCamCtrl->mCallbackCookie);
+    }
+    ALOGD("end of cb");
+  }
+  if(rcb != NULL)
+  {
+    if (mHalCamCtrl->mStoreMetaDataInFrame)
+    {
+      mStopCallbackLock.unlock();
+      if(mHalCamCtrl->mStartRecording == true &&( mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME))
+        rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+          mHalCamCtrl->mRecordingMemory.metadata_memory[frame->def.idx],
+          0, mHalCamCtrl->mCallbackCookie);
+    }
+    else
+    {
+      if(mHalCamCtrl->mStartRecording == true &&( mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME))
+      {
+        mStopCallbackLock.unlock();
+        rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+          mHalCamCtrl->mPreviewMemory.camera_memory[frame->def.idx],
+          0, mHalCamCtrl->mCallbackCookie);
+      }
+    }
+  }
+
+  /* Save the last displayed frame. We'll be using it to fill the gap between
+     when preview stops and postview start during snapshot.*/
+  //mLastQueuedFrame = frame->def.frame;
+/*
+  if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, frame))
+  {
+      ALOGE("BUF DONE FAILED");
+      return BAD_VALUE;
+  }
+*/
+  return NO_ERROR;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Construct a preview stream for the given camera in the given mode.
+ * The HAL control object is attached later; start with no queued frame
+ * and no face-detection results received. */
+QCameraStream_preview::QCameraStream_preview(int cameraId, camera_mode_t mode)
+  : QCameraStream(cameraId,mode),
+    mLastQueuedFrame(NULL),
+    mNumFDRcvd(0)
+{
+  ALOGE("%s: E", __func__);
+  mHalCamCtrl = NULL;
+  ALOGE("%s: X", __func__);
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Destructor: make sure the stream is stopped and released before the
+ * object goes away, then clear the state flags. */
+QCameraStream_preview::~QCameraStream_preview() {
+  ALOGV("%s: E", __func__);
+  if (mActive)
+    stop();
+  if (mInit)
+    release();
+  mInit = false;
+  mActive = false;
+  ALOGV("%s: X", __func__);
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Initialize the preview stream: acquire the preview channel from
+ * mm-camera and register preview_notify_cb as the infinite buffer-notify
+ * callback. Returns NO_ERROR on success or the channel-init error. */
+status_t QCameraStream_preview::init() {
+
+  status_t ret = NO_ERROR;
+  ALOGV("%s: E", __func__);
+
+  ret = QCameraStream::initChannel (mCameraId, MM_CAMERA_CH_PREVIEW_MASK);
+  if (NO_ERROR!=ret) {
+    ALOGE("%s E: can't init native cammera preview ch\n",__func__);
+    return ret;
+  }
+
+  ALOGE("Debug : %s : initChannel",__func__);
+  /* register a notify into the mmmm_camera_t object*/
+  (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                     preview_notify_cb, MM_CAMERA_REG_BUF_CB_INFINITE, 0, this);
+  ALOGE("Debug : %s : cam_evt_register_buf_notify",__func__);
+  /* NOTE: removed unused locals buffer_handle / tmp_stride (dead code). */
+  mInit = true;
+  return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Start preview streaming: set the channel format, allocate/register the
+ * display buffers, select the operating mode (VIDEO, or ZSL which is set
+ * elsewhere in CamCtrl), and kick off MM_CAMERA_OPS_PREVIEW.
+ * Returns NO_ERROR on success, BAD_VALUE on any failure. */
+status_t QCameraStream_preview::start()
+{
+  ALOGV("%s: E", __func__);
+  status_t ret = NO_ERROR;
+  mm_camera_reg_buf_t *reg_buf=&mDisplayBuf;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+
+  /* call start() in parent class to start the monitor thread*/
+  //QCameraStream::start ();
+  setFormat(MM_CAMERA_CH_PREVIEW_MASK);
+
+  if(NO_ERROR!=initDisplayBuffers()){
+    return BAD_VALUE;
+  }
+  ALOGE("Debug : %s : initDisplayBuffers",__func__);
+  ret = cam_config_prepare_buf(mCameraId, reg_buf);
+  ALOGE("Debug : %s : cam_config_prepare_buf",__func__);
+  if(ret != MM_CAMERA_OK) {
+    /* BUGFIX: this failure used to be logged and then swallowed —
+     * streaming would be started with unregistered buffers. Bail out. */
+    ALOGV("%s:reg preview buf err=%d\n", __func__, ret);
+    return BAD_VALUE;
+  }
+  ret = NO_ERROR;
+
+  /* For preview, the OP_MODE we set is dependent upon whether we are
+     starting camera or camcorder. For snapshot, anyway we disable preview.
+     However, for ZSL we need to set OP_MODE to OP_MODE_ZSL and not
+     OP_MODE_VIDEO. We'll set that for now in CamCtrl. So in case of
+     ZSL we skip setting Mode here */
+
+  if (!(myMode & CAMERA_ZSL_MODE)) {
+    ALOGE("Setting OP MODE to MM_CAMERA_OP_MODE_VIDEO");
+    mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_VIDEO;
+    ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+                               &op_mode);
+    ALOGE("OP Mode Set");
+
+    if(MM_CAMERA_OK != ret) {
+      ALOGE("%s: X :set mode MM_CAMERA_OP_MODE_VIDEO err=%d\n", __func__, ret);
+      return BAD_VALUE;
+    }
+  }else {
+    ALOGE("Setting OP MODE to MM_CAMERA_OP_MODE_ZSL");
+    mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_ZSL;
+    ret = cam_config_set_parm (mCameraId, MM_CAMERA_PARM_OP_MODE,
+                               &op_mode);
+    if(MM_CAMERA_OK != ret) {
+      ALOGE("%s: X :set mode MM_CAMERA_OP_MODE_ZSL err=%d\n", __func__, ret);
+      return BAD_VALUE;
+    }
+  }
+
+  /* call mm_camera action start(...) */
+  ALOGE("Starting Preview/Video Stream. ");
+  ret = cam_ops_action(mCameraId, TRUE, MM_CAMERA_OPS_PREVIEW, 0);
+
+  if (MM_CAMERA_OK != ret) {
+    ALOGE ("%s: preview streaming start err=%d\n", __func__, ret);
+    return BAD_VALUE;
+  }
+
+  ALOGE("Debug : %s : Preview streaming Started",__func__);
+  ret = NO_ERROR;
+
+  mActive =  true;
+  ALOGE("%s: X", __func__);
+  return NO_ERROR;
+}
+
+
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+/* Stop preview streaming: halt MM_CAMERA_OPS_PREVIEW, unregister the
+ * stream buffers, free the multi-plane descriptor array, and return the
+ * gralloc buffers to the preview surface. No-op when not active. */
+void QCameraStream_preview::stop() {
+  ALOGE("%s: E", __func__);
+  int ret=MM_CAMERA_OK;
+
+  if(!mActive) {
+    return;
+  }
+  mActive = false;
+  Mutex::Autolock lock(mStopCallbackLock);
+  /* unregister the notify fn from the mmmm_camera_t object*/
+
+  /* call stop() in parent class to stop the monitor thread*/
+  ret = cam_ops_action(mCameraId, FALSE, MM_CAMERA_OPS_PREVIEW, 0);
+  if(MM_CAMERA_OK != ret) {
+    ALOGE ("%s: camera preview stop err=%d\n", __func__, ret);
+  }
+  ALOGE("Debug : %s : Preview streaming Stopped",__func__);
+  ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_PREVIEW);
+  if(ret != MM_CAMERA_OK) {
+    ALOGE("%s:Unreg preview buf err=%d\n", __func__, ret);
+    //ret = BAD_VALUE;
+  }
+
+  ALOGE("Debug : %s : Buffer Unprepared",__func__);
+  if (mDisplayBuf.preview.buf.mp != NULL) {
+    delete[] mDisplayBuf.preview.buf.mp;
+    /* BUGFIX: null the pointer so a later stop()/start() cycle cannot
+     * double-delete or reuse a dangling array. */
+    mDisplayBuf.preview.buf.mp = NULL;
+  }
+  /*free camera_memory handles and return buffer back to surface*/
+  putBufferToSurface();
+
+  ALOGE("%s: X", __func__);
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+/* Release the preview stream: stop it if still active, deinit the
+ * mm-camera preview channel, and unregister the buffer-notify callback.
+ * No-op when the stream was never initialized. */
+void QCameraStream_preview::release() {
+
+  ALOGE("%s : BEGIN",__func__);
+  int ret=MM_CAMERA_OK;  /* NOTE: removed unused local 'i' */
+
+  if(!mInit)
+  {
+    ALOGE("%s : Stream not Initalized",__func__);
+    return;
+  }
+
+  if(mActive) {
+    this->stop();
+  }
+
+  ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_PREVIEW);
+  ALOGE("Debug : %s : De init Channel",__func__);
+  if(ret != MM_CAMERA_OK) {
+    ALOGE("%s:Deinit preview channel failed=%d\n", __func__, ret);
+    //ret = BAD_VALUE;
+  }
+
+  /* Clear the notify registration installed by init(). */
+  (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+                                    NULL,
+                                    (mm_camera_register_buf_cb_type_t)NULL,
+                                    NULL,
+                                    NULL);
+  mInit = false;
+  ALOGE("%s: END", __func__);
+}
+
+/* Factory: heap-allocate a new preview stream object. */
+QCameraStream* QCameraStream_preview::createInstance(int cameraId,
+                                                     camera_mode_t mode)
+{
+  return new QCameraStream_preview(cameraId, mode);
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_preview
+// ---------------------------------------------------------------------------
+
+/* Factory counterpart: release the stream's resources and destroy it.
+ * Accepts NULL and does nothing in that case. */
+void QCameraStream_preview::deleteInstance(QCameraStream *p)
+{
+  if (!p)
+    return;
+  ALOGV("%s: BEGIN", __func__);
+  p->release();
+  delete p;
+  p = NULL;
+  ALOGV("%s: END", __func__);
+}
+
+
+/* Temp helper: expose the most recently displayed preview frame (used to
+ * bridge the preview/postview gap during snapshot). */
+void *QCameraStream_preview::getLastQueuedFrame(void)
+{
+  void *frame = mLastQueuedFrame;
+  return frame;
+}
+
+/* Dummy stub kept only so this file compiles:
+ * 1. on 7x27a this must never be called;
+ * 2. this file is slated for removal ASAP. */
+status_t QCameraStream_preview::initPreviewOnlyBuffers()
+{
+  return INVALID_OPERATION;
+}
+
+// ---------------------------------------------------------------------------
+// No code beyond this line
+// ---------------------------------------------------------------------------
+}; // namespace android
diff --git a/camera/QCameraHWI_Record.cpp b/camera/QCameraHWI_Record.cpp
new file mode 100644
index 0000000..f9da650
--- /dev/null
+++ b/camera/QCameraHWI_Record.cpp
@@ -0,0 +1,581 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Record"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraStream.h"
+
+#define LIKELY(exp) __builtin_expect(!!(exp), 1)
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraStream_record class implementation goes here*/
+/* following code implement the video streaming capture & encoding logic of this class*/
+// ---------------------------------------------------------------------------
+// QCameraStream_record createInstance()
+// ---------------------------------------------------------------------------
+namespace android {
+
+
+/* Factory: heap-allocate a new record stream object. */
+QCameraStream* QCameraStream_record::createInstance(int cameraId,
+                                                    camera_mode_t mode)
+{
+  ALOGV("%s: BEGIN", __func__);
+  QCameraStream *stream = new QCameraStream_record(cameraId, mode);
+  ALOGV("%s: END", __func__);
+  return stream;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record deleteInstance()
+// ---------------------------------------------------------------------------
+/* Factory counterpart: release the stream's resources and destroy it.
+ * Accepts NULL and does nothing in that case. */
+void QCameraStream_record::deleteInstance(QCameraStream *ptr)
+{
+  ALOGV("%s: BEGIN", __func__);
+  if (ptr != NULL) {
+    ptr->release();
+    delete ptr;
+    ptr = NULL;
+  }
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Constructor
+// ---------------------------------------------------------------------------
+/* Construct a record stream; FPS debug logging is switched on via the
+ * persist.debug.sf.showfps system property. */
+QCameraStream_record::QCameraStream_record(int cameraId,
+                                           camera_mode_t mode)
+  :QCameraStream(cameraId,mode),
+   mDebugFps(false)
+{
+  char prop_value[PROPERTY_VALUE_MAX];
+
+  ALOGV("%s: BEGIN", __func__);
+  mHalCamCtrl = NULL;
+  property_get("persist.debug.sf.showfps", prop_value, "0");
+  mDebugFps = atoi(prop_value);
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Destructor
+// ---------------------------------------------------------------------------
+/* Destructor: stop and release the stream if needed, then clear flags. */
+QCameraStream_record::~QCameraStream_record() {
+  ALOGV("%s: BEGIN", __func__);
+  if (mActive)
+    stop();
+  if (mInit)
+    release();
+  mInit = false;
+  mActive = false;
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Callback from mm_camera
+// ---------------------------------------------------------------------------
+/* mm-camera video-channel callback: forwards each new video buffer to
+ * QCameraStream_record::processRecordFrame() for timestamping/delivery.
+ * user_data is the QCameraStream_record* registered in init(). */
+static void record_notify_cb(mm_camera_ch_data_buf_t *bufs_new,
+                             void *user_data)
+{
+  QCameraStream_record *pme = (QCameraStream_record *)user_data;
+  ALOGV("%s: BEGIN", __func__);
+  /* NOTE: removed unused local bufs_used (dead code). */
+
+  /*
+   * Call Function Process Video Data
+   */
+  pme->processRecordFrame(bufs_new);
+  ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+/* Initialize the record stream: acquire the video channel from mm-camera
+ * and register record_notify_cb as the infinite buffer-notify callback.
+ * Returns NO_ERROR on success or the channel-init error. */
+status_t QCameraStream_record::init()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+
+  /*
+   * Acquiring Video Channel
+   */
+  ret = QCameraStream::initChannel (mCameraId, MM_CAMERA_CH_VIDEO_MASK);
+  if (NO_ERROR!=ret) {
+    /* BUGFIX: the old message said "preview" for the video channel. */
+    ALOGE("%s ERROR: Can't init native camera video ch\n",__func__);
+    return ret;
+  }
+
+  /*
+   * Register the Callback with camera
+   */
+  (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+                                     record_notify_cb,
+                                     MM_CAMERA_REG_BUF_CB_INFINITE,
+                                     0,
+                                     this);
+
+  mInit = true;
+  ALOGV("%s: END", __func__);
+  return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+
+/* Start video streaming: set the channel format, allocate the encoder
+ * frame buffers, register them with mm-camera, and start
+ * MM_CAMERA_OPS_VIDEO. On any failure after buffer allocation the
+ * buffers are torn down again via releaseEncodeBuffer().
+ * Returns NO_ERROR on success, BAD_VALUE otherwise. */
+status_t QCameraStream_record::start()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mInit) {
+    /* init() must have acquired the channel first. */
+    ALOGE("%s ERROR: Record buffer not registered",__func__);
+    return BAD_VALUE;
+  }
+
+  setFormat(MM_CAMERA_CH_VIDEO_MASK , (cam_format_t)0);
+  //mRecordFreeQueueLock.lock();
+  //mRecordFreeQueue.clear();
+  //mRecordFreeQueueLock.unlock();
+  /*
+   * Allocating Encoder Frame Buffers
+   */
+  ret = initEncodeBuffers();
+  if (NO_ERROR!=ret) {
+    ALOGE("%s ERROR: Buffer Allocation Failed\n",__func__);
+    goto error;
+  }
+
+  ret = cam_config_prepare_buf(mCameraId, &mRecordBuf);
+  if(ret != MM_CAMERA_OK) {
+    ALOGE("%s ERROR: Reg Record buf err=%d\n", __func__, ret);
+    ret = BAD_VALUE;
+    goto error;
+  }else{
+    ret = NO_ERROR;
+  }
+
+  /*
+   * Start Video Streaming
+   */
+  ret = cam_ops_action(mCameraId, true, MM_CAMERA_OPS_VIDEO, 0);
+  if (MM_CAMERA_OK != ret) {
+    ALOGE ("%s ERROR: Video streaming start err=%d\n", __func__, ret);
+    ret = BAD_VALUE;
+    goto error;
+  }else{
+    ALOGV("%s : Video streaming Started",__func__);
+    ret = NO_ERROR;
+  }
+  mActive = true;
+  ALOGV("%s: END", __func__);
+  return ret;
+
+error:
+  /* Common cleanup: free whatever initEncodeBuffers() allocated. */
+  releaseEncodeBuffer();
+  ALOGV("%s: END", __func__);
+  return ret;
+}
+
+/* Tear down all recording buffers: unmap each fd from the camera daemon,
+ * free metadata handles (when metadata-in-frame is enabled), release the
+ * camera heap memory, close fds, free ION memory, and drop the local
+ * frame bookkeeping arrays. Safe to call more than once. */
+void QCameraStream_record::releaseEncodeBuffer() {
+  for(int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+    if (NO_ERROR !=
+      mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_VIDEO, cnt,
+                                    mCameraId, CAM_SOCK_MSG_TYPE_FD_UNMAPPING))
+      ALOGE("%s: Unmapping Video Data Failed", __func__);
+
+    if (mHalCamCtrl->mStoreMetaDataInFrame) {
+      struct encoder_media_buffer_type * packet =
+          (struct encoder_media_buffer_type *)
+          mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+      native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+      mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->release(
+          mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]);
+    }
+    mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->release(
+        mHalCamCtrl->mRecordingMemory.camera_memory[cnt]);
+    if (mHalCamCtrl->mRecordingMemory.fd[cnt] >= 0) {
+      /* BUGFIX: fds start out as -1 (see initEncodeBuffers); do not
+       * close(-1). */
+      close(mHalCamCtrl->mRecordingMemory.fd[cnt]);
+    }
+    mHalCamCtrl->mRecordingMemory.fd[cnt] = -1;
+
+#ifdef USE_ION
+    mHalCamCtrl->deallocate_ion_memory(&mHalCamCtrl->mRecordingMemory, cnt);
+#endif
+  }
+  memset(&mHalCamCtrl->mRecordingMemory, 0, sizeof(mHalCamCtrl->mRecordingMemory));
+  //mNumRecordFrames = 0;
+  delete[] recordframes;
+  /* BUGFIX: null the arrays after delete[] — this function is reached
+   * from both the start() error path and stop(), so a second call would
+   * otherwise double-delete dangling pointers. */
+  recordframes = NULL;
+  if (mRecordBuf.video.video.buf.mp) {
+    delete[] mRecordBuf.video.video.buf.mp;
+    mRecordBuf.video.video.buf.mp = NULL;
+  }
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+/* Stop video streaming: halt MM_CAMERA_OPS_VIDEO, unregister the stream
+ * buffers, and free the encoder buffers. No-op when not active.
+ * mActive is cleared before taking mStopCallbackLock so in-flight
+ * processRecordFrame() calls drop their frames. */
+void QCameraStream_record::stop()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+
+  if(!mActive) {
+    ALOGE("%s : Record stream not started",__func__);
+    return;
+  }
+  mActive = false;
+  Mutex::Autolock lock(mStopCallbackLock);
+#if 0 //mzhu, when stop recording, all frame will be dirty. no need to queue frame back to kernel any more
+  mRecordFreeQueueLock.lock();
+  while(!mRecordFreeQueue.isEmpty()) {
+    ALOGV("%s : Pre-releasing of Encoder buffers!\n", __FUNCTION__);
+    mm_camera_ch_data_buf_t releasedBuf = mRecordFreeQueue.itemAt(0);
+    mRecordFreeQueue.removeAt(0);
+    mRecordFreeQueueLock.unlock();
+    ALOGV("%s (%d): releasedBuf.idx = %d\n", __FUNCTION__, __LINE__,
+          releasedBuf.video.video.idx);
+    if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId,&releasedBuf))
+      ALOGE("%s : Buf Done Failed",__func__);
+  }
+  mRecordFreeQueueLock.unlock();
+#if 0
+  while (!mRecordFreeQueue.isEmpty()) {
+    ALOGE("%s : Waiting for Encoder to release all buffer!\n", __FUNCTION__);
+  }
+#endif
+#endif // mzhu
+  /* unregister the notify fn from the mmmm_camera_t object
+   * call stop() in parent class to stop the monitor thread */
+
+  ret = cam_ops_action(mCameraId, false, MM_CAMERA_OPS_VIDEO, 0);
+  if (MM_CAMERA_OK != ret) {
+    ALOGE ("%s ERROR: Video streaming Stop err=%d\n", __func__, ret);
+  }
+
+  ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_VIDEO);
+  if(ret != MM_CAMERA_OK){
+    ALOGE("%s ERROR: Ureg video buf \n", __func__);
+  }
+
+  /* Free the encoder buffers allocated in start(). */
+  releaseEncodeBuffer();
+
+  mActive = false;
+  ALOGV("%s: END", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+/* Release the record stream: stop it if active, deinit the mm-camera
+ * video channel, and unregister the buffer-notify callback.
+ * NOTE(review): unlike the preview release(), stop() is invoked BEFORE
+ * the mInit check — presumably harmless since stop() guards on mActive,
+ * but worth confirming the asymmetry is intended. */
+void QCameraStream_record::release()
+{
+  status_t ret = NO_ERROR;
+  ALOGV("%s: BEGIN", __func__);
+
+  if(mActive) {
+    stop();
+  }
+  if(!mInit) {
+    ALOGE("%s : Record stream not initialized",__func__);
+    return;
+  }
+
+  ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_VIDEO);
+  if(ret != MM_CAMERA_OK) {
+    ALOGE("%s:Deinit Video channel failed=%d\n", __func__, ret);
+  }
+  /* Clear the notify registration installed by init(). */
+  (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+                                    NULL,
+                                    (mm_camera_register_buf_cb_type_t)NULL,
+                                    NULL,
+                                    NULL);
+  mInit = false;
+  ALOGV("%s: END", __func__);
+}
+
+/* Handle one video frame from mm-camera: timestamp it from the driver's
+ * ts, clean the CPU cache for the ION buffer, and deliver it to the
+ * client's timestamped-data callback (metadata handle or raw heap,
+ * depending on mStoreMetaDataInFrame).
+ * NOTE(review): mStopCallbackLock is manually unlocked before invoking
+ * the callback (while still inside the Autolock scope) — order-sensitive,
+ * do not restructure casually. */
+status_t QCameraStream_record::processRecordFrame(void *data)
+{
+  ALOGV("%s : BEGIN",__func__);
+  mm_camera_ch_data_buf_t* frame = (mm_camera_ch_data_buf_t*) data;
+
+  Mutex::Autolock lock(mStopCallbackLock);
+  if(!mActive) {
+    /* Stream already stopped: drop the frame. */
+    ALOGE("Recording Stopped. Returning callback");
+    return NO_ERROR;
+  }
+
+  if (UNLIKELY(mDebugFps)) {
+    debugShowVideoFPS();
+  }
+
+  mHalCamCtrl->dumpFrameToFile(frame->video.video.frame, HAL_DUMP_FRM_VIDEO);
+  /* Snapshot the timestamped-data callback under the callback lock. */
+  mHalCamCtrl->mCallbackLock.lock();
+  camera_data_timestamp_callback rcb = mHalCamCtrl->mDataCbTimestamp;
+  void *rdata = mHalCamCtrl->mCallbackCookie;
+  mHalCamCtrl->mCallbackLock.unlock();
+
+  /* Timestamp comes from the driver's capture time, in nanoseconds. */
+  nsecs_t timeStamp = nsecs_t(frame->video.video.frame->ts.tv_sec)*1000000000LL + \
+                      frame->video.video.frame->ts.tv_nsec;
+
+  ALOGV("Send Video frame to services/encoder TimeStamp : %lld",timeStamp);
+  /* Keep a copy so the buffer can be returned to the driver later. */
+  mRecordedFrames[frame->video.video.idx] = *frame;
+
+#ifdef USE_ION
+  /* Flush CPU caches so the encoder sees coherent buffer contents. */
+  struct ion_flush_data cache_inv_data;
+  int ion_fd;
+  ion_fd = frame->video.video.frame->ion_dev_fd;
+  cache_inv_data.vaddr = (void *)frame->video.video.frame->buffer;
+  cache_inv_data.fd = frame->video.video.frame->fd;
+  cache_inv_data.handle = frame->video.video.frame->fd_data.handle;
+  cache_inv_data.length = frame->video.video.frame->ion_alloc.len;
+
+  if (mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_CACHES) < 0)
+    ALOGE("%s: Cache clean for Video buffer %p fd = %d failed", __func__,
+      cache_inv_data.vaddr, cache_inv_data.fd);
+#endif
+
+  if (mHalCamCtrl->mStoreMetaDataInFrame) {
+    mStopCallbackLock.unlock();
+    if(mActive && (rcb != NULL) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
+      rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+          mHalCamCtrl->mRecordingMemory.metadata_memory[frame->video.video.idx],
+          0, mHalCamCtrl->mCallbackCookie);
+    }
+  } else {
+    mStopCallbackLock.unlock();
+    if(mActive && (rcb != NULL) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
+      rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME,
+          mHalCamCtrl->mRecordingMemory.camera_memory[frame->video.video.idx],
+          0, mHalCamCtrl->mCallbackCookie);
+    }
+  }
+
+  ALOGV("%s : END",__func__);
+  return NO_ERROR;
+}
+
+//Record Related Functions
+/* Allocates and maps the video (record) stream buffers and builds the
+ * multi-plane buffer table (mRecordBuf) that is handed to mm-camera.
+ * Queries the video dimensions, allocates buffer memory (ION or pmem),
+ * optionally wraps each buffer in an encoder metadata handle, and maps
+ * each fd to the daemon. Returns NO_ERROR on success, BAD_VALUE or
+ * UNKNOWN_ERROR on failure.
+ * NOTE(review): on the failure paths below, already-allocated resources
+ * (recordframes, mRecordBuf.video.video.buf.mp, earlier ION buffers)
+ * are not freed — presumably cleaned up elsewhere; confirm. */
+status_t QCameraStream_record::initEncodeBuffers()
+{
+ ALOGV("%s : BEGIN",__func__);
+ status_t ret = NO_ERROR;
+ const char *pmem_region;
+ uint32_t frame_len;
+ uint8_t num_planes;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ //cam_ctrl_dimension_t dim;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ int buf_cnt;
+ pmem_region = "/dev/pmem_adsp";
+
+
+ memset(&mHalCamCtrl->mRecordingMemory, 0, sizeof(mHalCamCtrl->mRecordingMemory));
+ // Fetch the current video dimension/offset configuration from the daemon.
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: ERROR - can't get camera dimension!", __func__);
+ return BAD_VALUE;
+ }
+ else {
+ width = dim.video_width;
+ height = dim.video_height;
+ }
+ // Two planes: Y and interleaved CbCr, lengths from the frame offset table.
+ num_planes = 2;
+ planes[0] = dim.video_frame_offset.mp[0].len;
+ planes[1] = dim.video_frame_offset.mp[1].len;
+ frame_len = dim.video_frame_offset.frame_len;
+
+ // Fewer buffers in low-power camcorder mode.
+ buf_cnt = VIDEO_BUFFER_COUNT;
+ if(mHalCamCtrl->isLowPowerCamcorder()) {
+ ALOGV("%s: lower power camcorder selected", __func__);
+ buf_cnt = VIDEO_BUFFER_COUNT_LOW_POWER_CAMCORDER;
+ }
+ recordframes = new msm_frame[buf_cnt];
+ memset(recordframes,0,sizeof(struct msm_frame) * buf_cnt);
+
+ // NOTE(review): allocates buf_cnt * sizeof(mm_camera_mp_buf_t) ELEMENTS,
+ // not bytes — buf_cnt elements would suffice; this over-allocates.
+ mRecordBuf.video.video.buf.mp = new mm_camera_mp_buf_t[buf_cnt *
+ sizeof(mm_camera_mp_buf_t)];
+ if (!mRecordBuf.video.video.buf.mp) {
+ ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+ return BAD_VALUE;
+ }
+ memset(mRecordBuf.video.video.buf.mp, 0,
+ buf_cnt * sizeof(mm_camera_mp_buf_t));
+
+ // Re-zero the recording memory bookkeeping and mark all fds invalid.
+ memset(&mHalCamCtrl->mRecordingMemory, 0, sizeof(mHalCamCtrl->mRecordingMemory));
+ for (int i=0; i<MM_CAMERA_MAX_NUM_FRAMES;i++) {
+ mHalCamCtrl->mRecordingMemory.main_ion_fd[i] = -1;
+ mHalCamCtrl->mRecordingMemory.fd[i] = -1;
+ }
+
+ mHalCamCtrl->mRecordingMemory.buffer_count = buf_cnt;
+
+ mHalCamCtrl->mRecordingMemory.size = frame_len;
+ mHalCamCtrl->mRecordingMemory.cbcr_offset = planes[0];
+
+ for (int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+#ifdef USE_ION
+ // ION path: allocate from the camera heap, falling back to IOMMU heap.
+ if(mHalCamCtrl->allocate_ion_memory(&mHalCamCtrl->mRecordingMemory, cnt,
+ ((0x1 << CAMERA_ION_HEAP_ID) | (0x1 << CAMERA_ION_FALLBACK_HEAP_ID))) < 0) {
+ ALOGE("%s ION alloc failed\n", __func__);
+ return UNKNOWN_ERROR;
+ }
+#else
+ // Legacy path: allocate from pmem.
+ mHalCamCtrl->mRecordingMemory.fd[cnt] = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+ if(mHalCamCtrl->mRecordingMemory.fd[cnt] <= 0) {
+ ALOGE("%s: no pmem for frame %d", __func__, cnt);
+ return UNKNOWN_ERROR;
+ }
+#endif
+ mHalCamCtrl->mRecordingMemory.camera_memory[cnt] =
+ mHalCamCtrl->mGetMemory(mHalCamCtrl->mRecordingMemory.fd[cnt],
+ mHalCamCtrl->mRecordingMemory.size, 1, (void *)this);
+
+ if (mHalCamCtrl->mStoreMetaDataInFrame) {
+ // Metadata mode: wrap the buffer fd/offset/size in a native handle
+ // that the video encoder consumes instead of the raw pixels.
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] =
+ mHalCamCtrl->mGetMemory(-1,
+ sizeof(struct encoder_media_buffer_type), 1, (void *)this);
+ struct encoder_media_buffer_type * packet =
+ (struct encoder_media_buffer_type *)
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+ packet->meta_handle = native_handle_create(1, 2); //1 fd, 1 offset and 1 size
+ packet->buffer_type = kMetadataBufferTypeCameraSource;
+ native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+ nh->data[0] = mHalCamCtrl->mRecordingMemory.fd[cnt];
+ nh->data[1] = 0;
+ nh->data[2] = mHalCamCtrl->mRecordingMemory.size;
+ }
+ recordframes[cnt].fd = mHalCamCtrl->mRecordingMemory.fd[cnt];
+ recordframes[cnt].buffer = (uint32_t)mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->data;
+ recordframes[cnt].y_off = 0;
+ recordframes[cnt].cbcr_off = mHalCamCtrl->mRecordingMemory.cbcr_offset;
+ recordframes[cnt].path = OUTPUT_TYPE_V;
+ recordframes[cnt].fd_data = mHalCamCtrl->mRecordingMemory.ion_info_fd[cnt];
+ recordframes[cnt].ion_alloc = mHalCamCtrl->mRecordingMemory.alloc[cnt];
+ recordframes[cnt].ion_dev_fd = mHalCamCtrl->mRecordingMemory.main_ion_fd[cnt];
+
+ // Tell the camera daemon about this fd so it can map the buffer.
+ if (NO_ERROR !=
+ mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_VIDEO, cnt,
+ recordframes[cnt].fd, mHalCamCtrl->mRecordingMemory.size, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING))
+ ALOGE("%s: sending mapping data Msg Failed", __func__);
+
+ ALOGV("initRecord : record heap , video buffers buffer=%lu fd=%d y_off=%d cbcr_off=%d\n",
+ (unsigned long)recordframes[cnt].buffer, recordframes[cnt].fd, recordframes[cnt].y_off,
+ recordframes[cnt].cbcr_off);
+ //mNumRecordFrames++;
+
+ // Populate the per-buffer multi-plane descriptor used by mm-camera.
+ mRecordBuf.video.video.buf.mp[cnt].frame = recordframes[cnt];
+ mRecordBuf.video.video.buf.mp[cnt].frame_offset = 0;
+ mRecordBuf.video.video.buf.mp[cnt].num_planes = num_planes;
+ /* Plane 0 needs to be set seperately. Set other planes
+ * in a loop. */
+ mRecordBuf.video.video.buf.mp[cnt].planes[0].reserved[0] =
+ mRecordBuf.video.video.buf.mp[cnt].frame_offset;
+ mRecordBuf.video.video.buf.mp[cnt].planes[0].length = planes[0];
+ mRecordBuf.video.video.buf.mp[cnt].planes[0].m.userptr =
+ recordframes[cnt].fd;
+ for (int j = 1; j < num_planes; j++) {
+ mRecordBuf.video.video.buf.mp[cnt].planes[j].length = planes[j];
+ mRecordBuf.video.video.buf.mp[cnt].planes[j].m.userptr =
+ recordframes[cnt].fd;
+ // Each plane's offset is the previous plane's offset + length.
+ mRecordBuf.video.video.buf.mp[cnt].planes[j].reserved[0] =
+ mRecordBuf.video.video.buf.mp[cnt].planes[j-1].reserved[0] +
+ mRecordBuf.video.video.buf.mp[cnt].planes[j-1].length;
+ }
+ }
+
+ //memset(&mRecordBuf, 0, sizeof(mRecordBuf));
+ mRecordBuf.ch_type = MM_CAMERA_CH_VIDEO;
+ mRecordBuf.video.video.num = mHalCamCtrl->mRecordingMemory.buffer_count;//kRecordBufferCount;
+ //mRecordBuf.video.video.frame_offset = &record_offset[0];
+ //mRecordBuf.video.video.frame = &recordframes[0];
+ ALOGV("%s : END",__func__);
+ return NO_ERROR;
+}
+
+/* Returns a recording frame that the encoder/app has finished with back
+ * to mm-camera. Matches the opaque pointer handed out in the data
+ * callback (metadata buffer or camera_memory data, depending on
+ * mStoreMetaDataInFrame) against the recording buffers and issues
+ * cam_evt_buf_done for the matching queued frame.
+ * Fix: corrected typo "opaue" -> "opaque" in the no-match error log. */
+void QCameraStream_record::releaseRecordingFrame(const void *opaque)
+{
+ ALOGV("%s : BEGIN, opaque = 0x%p",__func__, opaque);
+ if(!mActive)
+ {
+ ALOGE("%s : Recording already stopped!!! Leak???",__func__);
+ return;
+ }
+ for(int cnt = 0; cnt < mHalCamCtrl->mRecordingMemory.buffer_count; cnt++) {
+ if (mHalCamCtrl->mStoreMetaDataInFrame) {
+ // Metadata mode: opaque is the metadata buffer's data pointer.
+ if(mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] &&
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data == opaque) {
+ /* found the match */
+ if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mRecordedFrames[cnt]))
+ ALOGE("%s : Buf Done Failed",__func__);
+ ALOGV("%s : END",__func__);
+ return;
+ }
+ } else {
+ // Raw mode: opaque is the camera_memory data pointer.
+ if(mHalCamCtrl->mRecordingMemory.camera_memory[cnt] &&
+ mHalCamCtrl->mRecordingMemory.camera_memory[cnt]->data == opaque) {
+ /* found the match */
+ if(MM_CAMERA_OK != cam_evt_buf_done(mCameraId, &mRecordedFrames[cnt]))
+ ALOGE("%s : Buf Done Failed",__func__);
+ ALOGV("%s : END",__func__);
+ return;
+ }
+ }
+ }
+ ALOGE("%s: cannot find the matched frame with opaque = 0x%p", __func__, opaque);
+}
+
+/* Logs the achieved video frame rate, recomputed every 250 ms.
+ * NOTE(review): the function-static counters are shared by all instances
+ * and are not thread-safe — acceptable for debug-only use. */
+void QCameraStream_record::debugShowVideoFPS() const
+{
+ static int mFrameCount;
+ static int mLastFrameCount = 0;
+ static nsecs_t mLastFpsTime = 0;
+ static float mFps = 0;
+ mFrameCount++;
+ nsecs_t now = systemTime();
+ nsecs_t diff = now - mLastFpsTime;
+ if (diff > ms2ns(250)) {
+ mFps = ((mFrameCount - mLastFrameCount) * float(s2ns(1))) / diff;
+ ALOGV("Video Frames Per Second: %.4f", mFps);
+ mLastFpsTime = now;
+ mLastFrameCount = mFrameCount;
+ }
+}
+
+#if 0
+// Dead code: legacy heap accessor, disabled by the #if 0 above.
+sp<IMemoryHeap> QCameraStream_record::getHeap() const
+{
+ return mRecordHeap != NULL ? mRecordHeap->mHeap : NULL;
+}
+
+#endif
+/* Live snapshot entry point — not supported on this path.
+ * NOTE(review): returns true (== 1) as a status_t, which is NOT
+ * NO_ERROR (0); looks unintentional but matches the 7x27A variant —
+ * confirm callers treat it as a no-op. */
+status_t QCameraStream_record::takeLiveSnapshot(){
+ return true;
+}
+
+}//namespace android
+
diff --git a/camera/QCameraHWI_Record_7x27A.cpp b/camera/QCameraHWI_Record_7x27A.cpp
new file mode 100644
index 0000000..bba8d8d
--- /dev/null
+++ b/camera/QCameraHWI_Record_7x27A.cpp
@@ -0,0 +1,254 @@
+/*
+** Copyright (c) 2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Record"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <cutils/properties.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraStream.h"
+
+
+#define LIKELY(exp) __builtin_expect(!!(exp), 1)
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+
+/* QCameraStream_record class implementation goes here*/
+/* following code implements the video streaming capture & encoding logic of this class*/
+// ---------------------------------------------------------------------------
+// QCameraStream_record createInstance()
+// ---------------------------------------------------------------------------
+namespace android {
+
+
+// Factory: heap-allocates a QCameraStream_record for the given camera id
+// and mode. Caller owns the instance; release via deleteInstance().
+QCameraStream* QCameraStream_record::createInstance(int cameraId,
+ camera_mode_t mode)
+{
+ ALOGV("%s: BEGIN", __func__);
+ QCameraStream* pme = new QCameraStream_record(cameraId, mode);
+ ALOGV("%s: END", __func__);
+ return pme;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record deleteInstance()
+// ---------------------------------------------------------------------------
+// Releases stream resources and deletes an instance from createInstance().
+void QCameraStream_record::deleteInstance(QCameraStream *ptr)
+{
+ ALOGV("%s: BEGIN", __func__);
+ if (ptr){
+ ptr->release();
+ delete ptr;
+ ptr = NULL; // NOTE(review): only nulls the local copy; caller's pointer is unchanged.
+ }
+ ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Constructor
+// ---------------------------------------------------------------------------
+// Constructor: initializes base stream state and reads the
+// persist.debug.sf.showfps system property to enable FPS debug logging.
+QCameraStream_record::QCameraStream_record(int cameraId,
+ camera_mode_t mode)
+ :QCameraStream(cameraId,mode),
+ mDebugFps(false)
+{
+ mHalCamCtrl = NULL;
+ char value[PROPERTY_VALUE_MAX];
+ ALOGV("%s: BEGIN", __func__);
+
+ property_get("persist.debug.sf.showfps", value, "0");
+ mDebugFps = atoi(value); // non-zero property value enables debugShowVideoFPS()
+
+ ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Destructor
+// ---------------------------------------------------------------------------
+// Destructor: stops the stream if still active, then releases resources
+// if still initialized, and clears both state flags.
+QCameraStream_record::~QCameraStream_record() {
+ ALOGV("%s: BEGIN", __func__);
+ if(mActive) {
+ stop();
+ }
+ if(mInit) {
+ release();
+ }
+ mInit = false;
+ mActive = false;
+ ALOGV("%s: END", __func__);
+
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record Callback from mm_camera
+// ---------------------------------------------------------------------------
+// C-style callback registered with mm-camera; invoked when a video frame
+// buffer becomes available. Forwards the buffer to the stream object's
+// processRecordFrame() via the user_data cookie.
+static void record_notify_cb(mm_camera_ch_data_buf_t *bufs_new,
+ void *user_data)
+{
+ QCameraStream_record *pme = (QCameraStream_record *)user_data;
+ mm_camera_ch_data_buf_t *bufs_used = 0; // NOTE(review): unused local — candidate for removal.
+ ALOGV("%s: BEGIN", __func__);
+
+ /*
+ * Call Function Process Video Data
+ */
+ pme->processRecordFrame(bufs_new);
+ ALOGV("%s: END", __func__);
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+// Marks the record stream initialized. On 7x27A no channel/buffer setup
+// happens here; recording reuses the preview stream's buffers.
+status_t QCameraStream_record::init()
+{
+ status_t ret = NO_ERROR;
+ ALOGV("%s: BEGIN", __func__);
+ mInit = true;
+ ALOGV("%s: END", __func__);
+ return ret;
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+
+// Starts recording: allocates encoder metadata buffers and raises the
+// start-recording flags under mRecordLock. Returns the buffer-allocation
+// status.
+status_t QCameraStream_record::start()
+{
+ status_t ret = NO_ERROR;
+ ALOGE("%s: BEGIN", __func__); // NOTE(review): trace logged at error level — likely meant ALOGV.
+
+ ret = initEncodeBuffers();
+ if (NO_ERROR!=ret) {
+ ALOGE("%s ERROR: Buffer Allocation Failed\n",__func__);
+ return ret;
+ }
+ Mutex::Autolock l(&mHalCamCtrl->mRecordLock);
+ mHalCamCtrl->mReleasedRecordingFrame = false;
+
+ mHalCamCtrl->mStartRecording = true;
+
+ ALOGV("%s: END", __func__);
+ return ret;
+}
+
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+// Stops recording: clears the start flag, wakes any waiter blocked on
+// mRecordWait, then frees the encoder metadata buffers.
+// NOTE(review): the cleanup loop duplicates releaseEncodeBuffer() below —
+// calling it instead would avoid the duplication. Local `ret` is unused.
+void QCameraStream_record::stop()
+{
+ status_t ret = NO_ERROR;
+ ALOGE("%s: BEGIN", __func__);
+ mHalCamCtrl->mStartRecording = false;
+ Mutex::Autolock l(&mHalCamCtrl->mRecordLock);
+ {
+ // Signal releaseRecordingFrame semantics so a blocked caller resumes.
+ mHalCamCtrl->mRecordFrameLock.lock();
+ mHalCamCtrl->mReleasedRecordingFrame = true;
+ mHalCamCtrl->mRecordWait.signal();
+ mHalCamCtrl-> mRecordFrameLock.unlock();
+ }
+
+ // Metadata buffers were sized by the preview buffer count in
+ // initEncodeBuffers(), so iterate with the same bound here.
+ for(int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+ if (mHalCamCtrl->mStoreMetaDataInFrame) {
+ struct encoder_media_buffer_type * packet =
+ (struct encoder_media_buffer_type *)
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+ native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->release(
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]);
+ }
+ }
+ ALOGV("%s: END", __func__);
+
+}
+// ---------------------------------------------------------------------------
+// QCameraStream_record
+// ---------------------------------------------------------------------------
+// No-op: nothing to release on this path (init() allocates nothing).
+void QCameraStream_record::release()
+{
+ status_t ret = NO_ERROR;
+ ALOGV("%s: BEGIN", __func__);
+ ALOGV("%s: END", __func__);
+}
+
+// No-op stub on 7x27A: video frames are delivered via the preview path.
+status_t QCameraStream_record::processRecordFrame(void *data)
+{
+ ALOGE("%s : BEGIN",__func__);
+ ALOGE("%s : END",__func__);
+ return NO_ERROR;
+}
+
+//Record Related Functions
+//Record Related Functions
+/* Builds encoder metadata buffers that wrap the PREVIEW stream's gralloc
+ * buffers (fd, offset, size, data pointer) — on 7x27A recording shares
+ * the preview buffers rather than owning its own.
+ * Only runs in metadata-in-frame mode; otherwise a no-op. */
+status_t QCameraStream_record::initEncodeBuffers()
+{
+ ALOGE("%s : BEGIN",__func__);
+ status_t ret = NO_ERROR;
+ for (int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+ if (mHalCamCtrl->mStoreMetaDataInFrame) {
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt] =
+ mHalCamCtrl->mGetMemory(-1,
+ sizeof(struct encoder_media_buffer_type), 1, (void *)this);
+ struct encoder_media_buffer_type * packet =
+ (struct encoder_media_buffer_type *)
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+ packet->meta_handle = native_handle_create(1, 3); //1 fd, 1 offset,1 size and 1 data
+ packet->buffer_type = kMetadataBufferTypeCameraSource;
+ native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+ nh->data[0] = mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->fd;
+ nh->data[1] = 0;
+ nh->data[2] = mHalCamCtrl->mPreviewMemory.private_buffer_handle[cnt]->size;
+ // NOTE(review): pointer truncated to uint32_t — 32-bit-only assumption.
+ nh->data[3] = (uint32_t)mHalCamCtrl->mPreviewMemory.camera_memory[cnt]->data;
+ }
+ }
+ ALOGE("%s : END",__func__);
+ return NO_ERROR;
+}
+
+// Frees the encoder metadata handles and buffers created by
+// initEncodeBuffers(); mirror of that loop (preview buffer count bound).
+void QCameraStream_record::releaseEncodeBuffer() {
+ for(int cnt = 0; cnt < mHalCamCtrl->mPreviewMemory.buffer_count; cnt++) {
+ if (mHalCamCtrl->mStoreMetaDataInFrame) {
+ struct encoder_media_buffer_type * packet =
+ (struct encoder_media_buffer_type *)
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->data;
+ native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]->release(
+ mHalCamCtrl->mRecordingMemory.metadata_memory[cnt]);
+
+ }
+ }
+}
+
+// Wakes a caller waiting on mRecordWait; since recording shares preview
+// buffers on 7x27A there is no per-frame buf-done to issue here.
+void QCameraStream_record::releaseRecordingFrame(const void *opaque)
+{
+ Mutex::Autolock rLock(&mHalCamCtrl->mRecordFrameLock);
+ mHalCamCtrl->mReleasedRecordingFrame = true;
+ mHalCamCtrl->mRecordWait.signal();
+ ALOGE("%s, Signaling from-",__func__);
+}
+
+// No-op on this path: FPS accounting happens in the preview stream.
+void QCameraStream_record::debugShowVideoFPS() const
+{
+
+}
+
+/* Live snapshot entry point — not supported on this path.
+ * NOTE(review): returns true (== 1) as a status_t, which is NOT
+ * NO_ERROR (0); confirm callers ignore the value. */
+status_t QCameraStream_record::takeLiveSnapshot(){
+ return true;
+}
+
+}//namespace android
diff --git a/camera/QCameraHWI_Still.cpp b/camera/QCameraHWI_Still.cpp
new file mode 100755
index 0000000..1da8be5
--- /dev/null
+++ b/camera/QCameraHWI_Still.cpp
@@ -0,0 +1,2877 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NDDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QCameraHWI_Still"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+#include <media/mediarecorder.h>
+#include <math.h>
+#include "QCameraHAL.h"
+#include "QCameraHWI.h"
+
+#define THUMBNAIL_DEFAULT_WIDTH 512
+#define THUMBNAIL_DEFAULT_HEIGHT 384
+/* following code implements the still image capture & encoding logic of this class*/
+namespace android {
+
+typedef enum {
+ SNAPSHOT_STATE_ERROR,
+ SNAPSHOT_STATE_UNINIT,
+ SNAPSHOT_STATE_CH_ACQUIRED,
+ SNAPSHOT_STATE_BUF_NOTIF_REGD,
+ SNAPSHOT_STATE_BUF_INITIALIZED,
+ SNAPSHOT_STATE_INITIALIZED,
+ SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD,
+ SNAPSHOT_STATE_YUV_RECVD,
+ SNAPSHOT_STATE_JPEG_ENCODING,
+ SNAPSHOT_STATE_JPEG_ENCODE_DONE,
+ SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE,
+
+ /*Add any new state above*/
+ SNAPSHOT_STATE_MAX
+} snapshot_state_type_t;
+
+
+//-----------------------------------------------------------------------
+// Constants
+//----------------------------------------------------------------------
+static const int PICTURE_FORMAT_JPEG = 1;
+static const int PICTURE_FORMAT_RAW = 2;
+static const int POSTVIEW_SMALL_HEIGHT = 144;
+
+// ---------------------------------------------------------------------------
+/* static functions*/
+// ---------------------------------------------------------------------------
+
+
+
+/* TBD: Temp: to be removed*/
+static pthread_mutex_t g_s_mutex;
+static int g_status = 0;
+static pthread_cond_t g_s_cond_v;
+
+// Signals snapshot completion: sets g_status under g_s_mutex and wakes
+// the waiter blocked in mm_app_snapshot_wait().
+static void mm_app_snapshot_done()
+{
+ pthread_mutex_lock(&g_s_mutex);
+ g_status = true;
+ pthread_cond_signal(&g_s_cond_v);
+ pthread_mutex_unlock(&g_s_mutex);
+}
+
+// Blocks until mm_app_snapshot_done() fires, then consumes the signal.
+// NOTE(review): g_status is reset AFTER releasing the mutex — safe only
+// with a single waiter/signaler pair; confirm usage.
+static void mm_app_snapshot_wait()
+{
+ pthread_mutex_lock(&g_s_mutex);
+ if(false == g_status) pthread_cond_wait(&g_s_cond_v, &g_s_mutex);
+ pthread_mutex_unlock(&g_s_mutex);
+ g_status = false;
+}
+
+/* Debug helper: dumps `len` bytes of a snapshot frame to `filename`.
+ * Returns 0 on success, -1 on failure.
+ * Fixes: removed the unused `bufp[128]` local and now checks the
+ * write(2) return value, so a short/failed write is reported as an
+ * error instead of being silently ignored. */
+static int mm_app_dump_snapshot_frame(char *filename,
+ const void *buffer,
+ uint32_t len)
+{
+ int file_fdp;
+ int rc = 0;
+
+ file_fdp = open(filename, O_RDWR | O_CREAT, 0777);
+
+ if (file_fdp < 0) {
+ rc = -1;
+ goto end;
+ }
+ // write() may fail or write fewer bytes than requested; treat both as error.
+ if (write(file_fdp,
+ (const void *)buffer, len) != (ssize_t)len) {
+ rc = -1;
+ }
+ close(file_fdp);
+end:
+ return rc;
+}
+
+/* Callback received when a frame is available after snapshot*/
+// Forwards the received raw frame to the snapshot stream object
+// (user_data cookie); logs and drops the frame if the cookie is NULL.
+static void snapshot_notify_cb(mm_camera_ch_data_buf_t *recvd_frame,
+ void *user_data)
+{
+ QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+
+ ALOGV("%s: E", __func__);
+
+ if (pme != NULL) {
+ pme->receiveRawPicture(recvd_frame);
+ }
+ else{
+ ALOGW("%s: Snapshot obj NULL in callback", __func__);
+ }
+
+ ALOGV("%s: X", __func__);
+
+}
+
+/* Once we give frame for encoding, we get encoded jpeg image
+ fragments by fragment. We'll need to store them in a buffer
+ to form complete JPEG image */
+// JPEG encoder fragment callback: each encoded chunk is appended to the
+// in-progress JPEG buffer via receiveJpegFragment().
+static void snapshot_jpeg_fragment_cb(uint8_t *ptr,
+ uint32_t size,
+ void *user_data)
+{
+ QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+
+ ALOGV("%s: E",__func__);
+ if (pme != NULL) {
+ pme->receiveJpegFragment(ptr,size);
+ }
+ else
+ ALOGW("%s: Receive jpeg fragment cb obj Null", __func__);
+
+ ALOGV("%s: X",__func__);
+}
+
+/* This callback is received once the complete JPEG encoding is done */
+static void snapshot_jpeg_cb(jpeg_event_t event, void *user_data)
+{
+ QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)user_data;
+ ALOGV("%s: E ",__func__);
+
+ switch(event) {
+ case JPEG_EVENT_DONE:
+ break;
+ case JPEG_EVENT_THUMBNAIL_DROPPED:
+ ALOGE("%s: Error in thumbnail encoding (event: %d) : X !!!",
+ __func__, event);
+ return;
+ case JPEG_EVENT_ERROR:
+ case JPEG_EVENT_ABORTED:
+ if (NULL != pme) {
+ pme->jpegErrorHandler(event);
+ if (!(pme->isZSLMode())) {
+ pme->stop();
+ }
+ }
+ ALOGE("Error event handled from JPEG \n");
+ return;
+ default:
+ ALOGE("Unsupported JPEG event %d \n", event);
+ break;
+ }
+
+ if (event != JPEG_EVENT_DONE) {
+ if (event == JPEG_EVENT_THUMBNAIL_DROPPED) {
+ ALOGE("%s: Error in thumbnail encoding (event: %d)!!!",
+ __func__, event);
+ ALOGV("%s: X",__func__);
+ return;
+ }
+ else {
+ ALOGE("%s: Error (event: %d) while jpeg encoding!!!",
+ __func__, event);
+ }
+ }
+
+ if (pme != NULL) {
+ pme->receiveCompleteJpegPicture(event);
+ ALOGV(" Completed issuing JPEG callback");
+ /* deinit only if we are done taking requested number of snapshots */
+ if (pme->getSnapshotState() == SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE) {
+ ALOGV(" About to issue deinit callback");
+ /* If it's ZSL Mode, we don't deinit now. We'll stop the polling thread and
+ deinit the channel/buffers only when we change the mode from zsl to
+ non-zsl. */
+ if (!(pme->isZSLMode())) {
+ pme->stop();
+ }
+ }
+ }
+ else
+ ALOGW("%s: Receive jpeg cb Obj Null", __func__);
+
+
+ ALOGV("%s: X",__func__);
+}
+
+// ---------------------------------------------------------------------------
+/* private functions*/
+// ---------------------------------------------------------------------------
+
+// Appends one encoder-produced JPEG fragment into mJpegMemory at
+// mJpegOffset and advances the offset. The #if 0 branch is the retired
+// mJpegHeap implementation; the active path (#else) copies into the
+// single ashmem/ION-backed JPEG buffer.
+void QCameraStream_Snapshot::
+receiveJpegFragment(uint8_t *ptr, uint32_t size)
+{
+ ALOGV("%s: E", __func__);
+#if 0
+ if (mJpegHeap != NULL) {
+ ALOGE("%s: Copy jpeg...", __func__);
+ memcpy((uint8_t *)mJpegHeap->mHeap->base()+ mJpegOffset, ptr, size);
+ mJpegOffset += size;
+ }
+ else {
+ ALOGE("%s: mJpegHeap is NULL!", __func__);
+ }
+ #else
+ if(mHalCamCtrl->mJpegMemory.camera_memory[0] != NULL && ptr != NULL && size > 0) {
+ // NOTE(review): pointer arithmetic via uint32_t cast — 32-bit-only
+ // assumption; no bounds check against the JPEG buffer capacity.
+ memcpy((uint8_t *)((uint32_t)mHalCamCtrl->mJpegMemory.camera_memory[0]->data + mJpegOffset), ptr, size);
+ mJpegOffset += size;
+
+
+ /*
+ memcpy((uint8_t *)((uint32_t)mHalCamCtrl->mJpegMemory.camera_memory[0]->data + mJpegOffset), ptr, size);
+ mJpegOffset += size;
+ */
+ } else {
+ ALOGE("%s: mJpegHeap is NULL!", __func__);
+ }
+
+
+ #endif
+
+ ALOGV("%s: X", __func__);
+}
+
+// Handles a JPEG encoder error/abort: frees the frame being encoded,
+// moves the snapshot state machine to ERROR, flushes any queued frames,
+// then notifies the app with a (dummy 1-byte) COMPRESSED_IMAGE callback
+// so it can recover to preview.
+// NOTE(review): mGetMemory may return NULL; `data` is passed to the
+// callback without a NULL check — confirm upper layer tolerates it.
+void QCameraStream_Snapshot::jpegErrorHandler(jpeg_event_t event)
+{
+ ALOGV("%s: E", __func__);
+ camera_memory_t *data = mHalCamCtrl->mGetMemory(-1, 1, 1, NULL);
+ mStopCallbackLock.lock( );
+ if(mCurrentFrameEncoded) {
+ free(mCurrentFrameEncoded);
+ mCurrentFrameEncoded = NULL;
+ }
+ setSnapshotState(SNAPSHOT_STATE_ERROR);
+ if (!mSnapshotQueue.isEmpty()) {
+ ALOGV("%s: JPEG Queue not empty. flush the queue in "
+ "error case.", __func__);
+ mSnapshotQueue.flush();
+ }
+ mStopCallbackLock.unlock( );
+ // Callback issued outside the lock to avoid deadlock with the app thread.
+ if (NULL != mHalCamCtrl->mDataCb)
+ mHalCamCtrl->mDataCb (CAMERA_MSG_COMPRESSED_IMAGE,
+ data, 0, NULL,mHalCamCtrl->mCallbackCookie);
+ if(NULL != data) data->release(data);
+ ALOGV("%s: X", __func__);
+}
+
+// Called when the encoder reports a complete JPEG. Returns the encoded
+// source frame to mm-camera, updates snapshot state, then either kicks
+// off encoding of the next queued frame, finishes the burst (omxJpegFinish,
+// ZSL attribute reset, stop polling), and finally delivers the JPEG (or a
+// NULL-data failure callback) to the app via CAMERA_MSG_COMPRESSED_IMAGE.
+void QCameraStream_Snapshot::
+receiveCompleteJpegPicture(jpeg_event_t event)
+{
+ int msg_type = CAMERA_MSG_COMPRESSED_IMAGE;
+ ALOGV("%s: E", __func__);
+ // NOTE(review): this outer encodedMem is never used — it is shadowed by
+ // the local declaration in the success branch below.
+ camera_memory_t *encodedMem = NULL;
+ camera_data_callback jpg_data_cb = NULL;
+ bool fail_cb_flag = false;
+
+ // In HDR mode every other JPEG is an intermediate exposure (see the
+ // hdrJpegCount%2 early-out below).
+ if (mHalCamCtrl->mHdrMode == HDR_MODE)
+ hdrJpegCount++;
+
+ mStopCallbackLock.lock( );
+ if(!mActive && !isLiveSnapshot()) {
+ ALOGE("%s : Cancel Picture",__func__);
+ fail_cb_flag = true;
+ goto end;
+ }
+
+ if(mCurrentFrameEncoded!=NULL /*&& !isLiveSnapshot()*/){
+ ALOGV("<DEBUG>: Calling buf done for snapshot buffer");
+ cam_evt_buf_done(mCameraId, mCurrentFrameEncoded);
+ }
+ mHalCamCtrl->dumpFrameToFile(mHalCamCtrl->mJpegMemory.camera_memory[0]->data, mJpegOffset, (char *)"debug", (char *)"jpg", 0);
+
+end:
+ msg_type = CAMERA_MSG_COMPRESSED_IMAGE;
+ if (mHalCamCtrl->mDataCb && (mHalCamCtrl->mMsgEnabled & msg_type)) {
+ jpg_data_cb = mHalCamCtrl->mDataCb;
+ }else{
+ ALOGE("%s: JPEG callback was cancelled--not delivering image.", __func__);
+ }
+ setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODE_DONE);
+ mNumOfRecievedJPEG++;
+ mHalCamCtrl->deinitExifData();
+
+ /* free the resource we allocated to maintain the structure */
+ //mm_camera_do_munmap(main_fd, (void *)main_buffer_addr, mSnapshotStreamBuf.frame_len);
+ if(mCurrentFrameEncoded) {
+ free(mCurrentFrameEncoded);
+ mCurrentFrameEncoded = NULL;
+ }
+ /* Before leaving check the jpeg queue. If it's not empty give the available
+ frame for encoding*/
+ if (!mSnapshotQueue.isEmpty()) {
+ ALOGV("%s: JPEG Queue not empty. Dequeue and encode.", __func__);
+ mm_camera_ch_data_buf_t* buf =
+ (mm_camera_ch_data_buf_t *)mSnapshotQueue.dequeue();
+ //encodeDisplayAndSave(buf, 1);
+ if ( NO_ERROR != encodeDisplayAndSave(buf, 1)){
+ fail_cb_flag = true;
+ }
+ } else if (mNumOfSnapshot == mNumOfRecievedJPEG ) { /* finished */
+ ALOGV("nusnap %d, mNumjpeg %d", mNumOfSnapshot, mNumOfRecievedJPEG);
+ ALOGV("%s: Before omxJpegFinish", __func__);
+ omxJpegFinish();
+ ALOGV("%s: After omxJpegFinish", __func__);
+ /* getRemainingSnapshots call will give us number of snapshots still
+ remaining after flushing current zsl buffer once*/
+ ALOGV("%s: Complete JPEG Encoding Done!", __func__);
+ setSnapshotState(SNAPSHOT_STATE_JPEG_COMPLETE_ENCODE_DONE);
+ mBurstModeFlag = false;
+ mSnapshotQueue.flush();
+ mNumOfRecievedJPEG = 0;
+ /* in case of zsl, we need to reset some of the zsl attributes */
+ if (isZSLMode()){
+ ALOGV("%s: Resetting the ZSL attributes", __func__);
+ setZSLChannelAttribute();
+ }
+ if (!isZSLMode() && !isLiveSnapshot()){
+ //Stop polling before calling datacb for if not ZSL mode
+ stopPolling();
+ }
+
+ } else {
+ ALOGV("%s: mNumOfRecievedJPEG(%d), mNumOfSnapshot(%d)", __func__, mNumOfRecievedJPEG, mNumOfSnapshot);
+ }
+ if(fail_cb_flag && mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ /* get picture failed. Give jpeg callback with NULL data
+ * to the application to restore to preview mode
+ */
+ jpg_data_cb = mHalCamCtrl->mDataCb;
+ }
+ // HDR intermediate frame: skip the app callback, just reset the offset.
+ if(mHalCamCtrl->mHdrMode == HDR_MODE && (hdrJpegCount%2) != 0){
+ mStopCallbackLock.unlock( );
+ mJpegOffset = 0;
+ return;
+ }
+ if(!fail_cb_flag) {
+ // Copy the finished JPEG into a fresh camera_memory sized to mJpegOffset
+ // so the app gets a tight buffer it can own.
+ camera_memory_t *encodedMem = mHalCamCtrl->mGetMemory(
+ mHalCamCtrl->mJpegMemory.fd[0], mJpegOffset, 1, mHalCamCtrl);
+ if (!encodedMem || !encodedMem->data) {
+ ALOGE("%s: mGetMemory failed.\n", __func__);
+ }
+ // NOTE(review): execution continues after the failed-allocation log —
+ // the memcpy below would dereference NULL; an early return/fail path
+ // looks intended here. Flagged, not changed.
+ memcpy(encodedMem->data, mHalCamCtrl->mJpegMemory.camera_memory[0]->data, mJpegOffset );
+ mStopCallbackLock.unlock( );
+
+ if ((mActive || isLiveSnapshot()) && jpg_data_cb != NULL) {
+ ALOGV("%s: Calling upperlayer callback to store JPEG image", __func__);
+ jpg_data_cb (msg_type,encodedMem, 0, NULL,mHalCamCtrl->mCallbackCookie);
+ }
+ encodedMem->release( encodedMem );
+ jpg_data_cb = NULL;
+ }else{
+ ALOGV("Image Encoding Failed... Notify Upper layer");
+ mStopCallbackLock.unlock( );
+ if((mActive || isLiveSnapshot()) && jpg_data_cb != NULL) {
+ jpg_data_cb (CAMERA_MSG_COMPRESSED_IMAGE,NULL, 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ //reset jpeg_offset
+ mJpegOffset = 0;
+
+ if(isLiveSnapshot() && mHalCamCtrl->mStateLiveshot) {
+ deInitBuffer();
+ }
+ mHalCamCtrl->mStateLiveshot = false;
+
+ ALOGV("%s: X", __func__);
+}
+
+// Reconciles the requested picture/thumbnail sizes with the dimensions
+// currently configured in the camera daemon. If they differ, pushes the
+// new sizes via MM_CAMERA_PARM_DIMENSION and adopts the (possibly
+// aspect-ratio-corrected) values the daemon returns. Also decides whether
+// the thumbnail should be dropped (app asked for 0x0) and whether JPEG
+// downscaling is needed to work around the VFE minimum-size limitation.
+status_t QCameraStream_Snapshot::
+configSnapshotDimension(cam_ctrl_dimension_t* dim)
+{
+ bool matching = true;
+ cam_format_t img_format;
+ status_t ret = NO_ERROR;
+ ALOGV("%s: E", __func__);
+
+ ALOGV("%s:Passed picture size: %d X %d", __func__,
+ dim->picture_width, dim->picture_height);
+ ALOGV("%s:Passed postview size: %d X %d", __func__,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ /* First check if the picture resolution is the same, if not, change it*/
+ mHalCamCtrl->getPictureSize(&mPictureWidth, &mPictureHeight);
+ ALOGV("%s: Picture size received: %d x %d", __func__,
+ mPictureWidth, mPictureHeight);
+
+ mPostviewWidth = mHalCamCtrl->mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ mPostviewHeight = mHalCamCtrl->mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ /*If application requested thumbnail size to be (0,0)
+ then configure second output to a default size.
+ Jpeg encoder will drop thumbnail as reflected in encodeParams.
+ */
+ mDropThumbnail = false;
+ if (mPostviewWidth == 0 && mPostviewHeight == 0) {
+ mPostviewWidth = THUMBNAIL_DEFAULT_WIDTH;
+ mPostviewHeight = THUMBNAIL_DEFAULT_HEIGHT;
+ mDropThumbnail = true;
+ }
+
+ ALOGV("%s: Postview size received: %d x %d", __func__,
+ mPostviewWidth, mPostviewHeight);
+
+ matching = (mPictureWidth == dim->picture_width) &&
+ (mPictureHeight == dim->picture_height);
+ matching &= (dim->ui_thumbnail_width == mPostviewWidth) &&
+ (dim->ui_thumbnail_height == mPostviewHeight);
+
+ /* picture size currently set do not match with the one wanted
+ by user.*/
+ if (!matching) {
+ if (!isZSLMode() && (mPictureWidth < mPostviewWidth || mPictureHeight < mPostviewHeight)) {
+ //Changes to Handle VFE limitation.
+ // VFE cannot produce a main image smaller than the postview;
+ // capture at postview size and downscale the JPEG afterwards.
+ mActualPictureWidth = mPictureWidth;
+ mActualPictureHeight = mPictureHeight;
+ mPictureWidth = mPostviewWidth;
+ mPictureHeight = mPostviewHeight;
+ mJpegDownscaling = true;
+ }else{
+ mJpegDownscaling = false;
+ }
+ dim->picture_width = mPictureWidth;
+ dim->picture_height = mPictureHeight;
+ dim->ui_thumbnail_height = mThumbnailHeight = mPostviewHeight;
+ dim->ui_thumbnail_width = mThumbnailWidth = mPostviewWidth;
+ }
+ #if 0
+ img_format = mHalCamCtrl->getPreviewFormat();
+ if (img_format) {
+ matching &= (img_format == dim->main_img_format);
+ if (!matching) {
+ dim->main_img_format = img_format;
+ dim->thumb_format = img_format;
+ }
+ }
+ #endif
+ if (!matching) {
+ ALOGV("%s: Image Sizes before set parm call: main: %dx%d thumbnail: %dx%d",
+ __func__,
+ dim->picture_width, dim->picture_height,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,dim);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't config snapshot parms!", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ }
+ /* set_parm will return corrected dimension based on aspect ratio and
+ ceiling size */
+ mPictureWidth = dim->picture_width;
+ mPictureHeight = dim->picture_height;
+ mPostviewHeight = mThumbnailHeight = dim->ui_thumbnail_height;
+ mPostviewWidth = mThumbnailWidth = dim->ui_thumbnail_width;
+ mPictureFormat= dim->main_img_format;
+ mThumbnailFormat = dim->thumb_format;
+
+ ALOGV("%s: Image Format: %d", __func__, dim->main_img_format);
+ ALOGV("%s: Image Sizes: main: %dx%d thumbnail: %dx%d", __func__,
+ dim->picture_width, dim->picture_height,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+// Configures the RAW snapshot channel: sets the raw-streaming channel
+// attribute (single capture only today), queries the sensor's raw format,
+// and programs the channel image format/dimensions. Any failure routes
+// through handleError() and returns FAILED_TRANSACTION.
+status_t QCameraStream_Snapshot::
+initRawSnapshotChannel(cam_ctrl_dimension_t *dim,
+ int num_of_snapshots)
+{
+ status_t ret = NO_ERROR;
+ mm_camera_ch_image_fmt_parm_t fmt;
+ mm_camera_channel_attr_t ch_attr;
+ cam_format_t raw_fmt;
+
+ mm_camera_raw_streaming_type_t raw_stream_type =
+ MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE;
+
+ ALOGV("%s: E", __func__);
+
+ /* Initialize stream - set format, acquire channel */
+ /*TBD: Currently we only support single raw capture*/
+ ALOGV("num_of_snapshots = %d",num_of_snapshots);
+ if (num_of_snapshots == 1) {
+ raw_stream_type = MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE;
+ }
+
+ /* Set channel attribute */
+ ALOGV("%s: Set Raw Snapshot Channel attribute", __func__);
+ memset(&ch_attr, 0, sizeof(ch_attr));
+ ch_attr.type = MM_CAMERA_CH_ATTR_RAW_STREAMING_TYPE;
+ ch_attr.raw_streaming_mode = raw_stream_type;
+
+ if( NO_ERROR !=
+ cam_ops_ch_set_attr(mCameraId, MM_CAMERA_CH_RAW, &ch_attr)) {
+ ALOGV("%s: Failure setting Raw channel attribute.", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ // Ask the daemon for the sensor's raw snapshot pixel format.
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_RAW_SNAPSHOT_FMT, &raw_fmt);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't get raw snapshot fmt!", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_RAW;
+ fmt.def.fmt = raw_fmt;
+ fmt.def.dim.width = dim->raw_picture_width;
+ fmt.def.dim.height = dim->raw_picture_height;
+
+
+ ALOGV("%s: Raw snapshot channel fmt: %d", __func__,
+ fmt.def.fmt);
+ ALOGV("%s: Raw snapshot resolution: %dX%d", __func__,
+ dim->raw_picture_width, dim->raw_picture_height);
+
+ ALOGV("%s: Set Raw Snapshot channel image format", __func__);
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: Set Raw Snapshot Channel format err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ ALOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Program the ZSL buffering attributes (look-back count, queue
+ watermark, burst interval, all read from the HAL controller) on the
+ snapshot channel. Returns NO_ERROR on success, FAILED_TRANSACTION
+ if the channel attribute cannot be set. */
+status_t QCameraStream_Snapshot::
+setZSLChannelAttribute(void)
+{
+ status_t ret = NO_ERROR;
+ mm_camera_channel_attr_t ch_attr;
+ ALOGV("%s: E", __func__);
+
+ memset(&ch_attr, 0, sizeof(mm_camera_channel_attr_t));
+ ch_attr.type = MM_CAMERA_CH_ATTR_BUFFERING_FRAME;
+ ch_attr.buffering_frame.look_back = mHalCamCtrl->getZSLBackLookCount();
+ ch_attr.buffering_frame.water_mark = mHalCamCtrl->getZSLQueueDepth();
+ ch_attr.buffering_frame.interval = mHalCamCtrl->getZSLBurstInterval( );
+ ALOGV("%s: ZSL queue_depth = %d, back_look_count = %d", __func__,
+ ch_attr.buffering_frame.water_mark,
+ ch_attr.buffering_frame.look_back);
+ if( NO_ERROR !=
+ cam_ops_ch_set_attr(mCameraId, MM_CAMERA_CH_SNAPSHOT, &ch_attr)) {
+ /* NOTE(review): failure logged at verbose level only. */
+ ALOGV("%s: Failure setting ZSL channel attribute.", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Program the snapshot channel's image format: main image and
+ thumbnail format/dimensions taken from *dim. In ZSL mode the ZSL
+ buffering attributes are applied first. Returns NO_ERROR on
+ success, FAILED_TRANSACTION otherwise; handleError() runs on the
+ failure path. */
+status_t QCameraStream_Snapshot::
+initSnapshotFormat(cam_ctrl_dimension_t *dim)
+{
+ status_t ret = NO_ERROR;
+ mm_camera_ch_image_fmt_parm_t fmt;
+
+ ALOGV("%s: E", __func__);
+
+ /* For ZSL mode we'll need to set channel attribute */
+ if (isZSLMode()) {
+ ret = setZSLChannelAttribute();
+ if (ret != NO_ERROR) {
+ goto end;
+ }
+ }
+
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_SNAPSHOT;
+ fmt.snapshot.main.fmt = dim->main_img_format;
+ fmt.snapshot.main.dim.width = dim->picture_width;
+ fmt.snapshot.main.dim.height = dim->picture_height;
+
+ fmt.snapshot.thumbnail.fmt = dim->thumb_format;
+ fmt.snapshot.thumbnail.dim.width = dim->ui_thumbnail_width;
+ fmt.snapshot.thumbnail.dim.height = dim->ui_thumbnail_height;
+
+ ALOGV("%s: Snapshot channel fmt = main: %d thumbnail: %d", __func__,
+ dim->main_img_format, dim->thumb_format);
+ ALOGV("%s: Snapshot channel resolution = main: %dX%d thumbnail: %dX%d",
+ __func__, dim->picture_width, dim->picture_height,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ ALOGV("%s: Set Snapshot channel image format", __func__);
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: Set Snapshot Channel format err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ ALOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Tear down a snapshot channel in state order: unregister the buffer
+ notification callback (by registering NULL) if one was registered,
+ then release the channel if it was acquired. State checks make the
+ call safe regardless of how far init progressed. */
+void QCameraStream_Snapshot::
+deinitSnapshotChannel(mm_camera_channel_type_t ch_type)
+{
+ ALOGV("%s: E", __func__);
+
+ /* Unregister by installing a NULL callback. */
+ if (getSnapshotState() >= SNAPSHOT_STATE_BUF_NOTIF_REGD){
+ if (NO_ERROR != cam_evt_register_buf_notify(mCameraId,
+ ch_type, NULL,(mm_camera_register_buf_cb_type_t)NULL,NULL, this)) {
+ ALOGE("%s: Failure to unregister buf notification", __func__);
+ }
+ }
+
+ if (getSnapshotState() >= SNAPSHOT_STATE_CH_ACQUIRED) {
+ ALOGV("%s: Release snapshot channel", __func__);
+ cam_ops_ch_release(mCameraId, ch_type);
+ }
+
+ ALOGV("%s: X",__func__);
+}
+
+/* Allocate and register stream buffers for the RAW snapshot channel.
+ Queries the raw format, computes the frame length/plane layout,
+ allocates num_of_buf heap buffers into mRawMemory, and registers
+ them with the channel via cam_config_prepare_buf(). On success the
+ state machine advances to SNAPSHOT_STATE_BUF_INITIALIZED.
+ Returns NO_ERROR, BAD_VALUE (bad count), NO_MEMORY, or
+ FAILED_TRANSACTION; handleError() runs on any failure.
+ Fix(review): restored "&reg_buf" where HTML-entity mangling had
+ turned it into a registered-trademark character; dropped the unused
+ local "frame"; prepare-buf failure now logs at error level. */
+status_t QCameraStream_Snapshot::
+initRawSnapshotBuffers(cam_ctrl_dimension_t *dim, int num_of_buf)
+{
+ status_t ret = NO_ERROR;
+ uint32_t frame_len;
+ uint8_t num_planes;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ mm_camera_reg_buf_t reg_buf;
+ cam_format_t raw_fmt;
+
+ ALOGV("%s: E", __func__);
+ memset(&reg_buf, 0, sizeof(mm_camera_reg_buf_t));
+ memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+
+ if ((num_of_buf == 0) || (num_of_buf > MM_CAMERA_MAX_NUM_FRAMES)) {
+ ALOGE("%s: Invalid number of buffers (=%d) requested!", __func__, num_of_buf);
+ ret = BAD_VALUE;
+ goto end;
+ }
+
+ reg_buf.def.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+ if (!reg_buf.def.buf.mp) {
+ ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+ ret = NO_MEMORY;
+ goto end;
+ }
+ memset(reg_buf.def.buf.mp, 0, num_of_buf * sizeof(mm_camera_mp_buf_t));
+
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_RAW_SNAPSHOT_FMT, &raw_fmt);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't get raw snapshot fmt!", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* Get a frame len for buffer to be allocated*/
+ frame_len = mm_camera_get_msm_frame_len(raw_fmt,
+ myMode,
+ dim->raw_picture_width,
+ dim->raw_picture_height,
+ OUTPUT_TYPE_S,
+ &num_planes, planes);
+
+ if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mRawMemory, num_of_buf,
+ frame_len, 0, planes[0], MSM_PMEM_RAW_MAINIMG,
+ &mSnapshotStreamBuf, &reg_buf.def,
+ num_planes, planes) < 0) {
+ ret = NO_MEMORY;
+ goto end;
+ }
+
+ /* register the streaming buffers for the channel*/
+ reg_buf.ch_type = MM_CAMERA_CH_RAW;
+ reg_buf.def.num = mSnapshotStreamBuf.num;
+
+ ret = cam_config_prepare_buf(mCameraId, &reg_buf);
+ if(ret != NO_ERROR) {
+ ALOGE("%s:reg snapshot buf err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mRawMemory);
+ goto end;
+ }
+
+ /* If we have reached here successfully, we have allocated buffer.
+ Set state machine.*/
+ setSnapshotState(SNAPSHOT_STATE_BUF_INITIALIZED);
+
+end:
+ /* The mp array is only needed for registration; free it on both
+ success and failure paths. */
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ if (reg_buf.def.buf.mp)
+ delete []reg_buf.def.buf.mp;
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Unregister the RAW channel buffers and release the raw heap, but
+ only if buffers were actually allocated (state >= BUF_INITIALIZED)
+ or we are cleaning up after an error. Returns NO_ERROR or
+ FAILED_TRANSACTION if unprepare fails (heap is then NOT released). */
+status_t QCameraStream_Snapshot::deinitRawSnapshotBuffers(void)
+{
+ int ret = NO_ERROR;
+ ALOGV("%s: E", __func__);
+
+ int err = getSnapshotState();
+
+ /* deinit buffers only if we have already allocated */
+ if (err >= SNAPSHOT_STATE_BUF_INITIALIZED || err == SNAPSHOT_STATE_ERROR){
+ ALOGV("%s: Unpreparing Snapshot Buffer", __func__);
+ ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_RAW);
+ if(ret != NO_ERROR) {
+ ALOGE("%s:Unreg Raw snapshot buf err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mRawMemory);
+ }
+
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Allocate and register buffers for a (non-raw) snapshot capture:
+ - applies JPEG rotation to the dimension config when it changed,
+ - allocates the JPEG output heap (mJpegMemory),
+ - for non-live snapshots, allocates main-image buffers
+ (mSnapshotMemory) and, unless this is a full-size liveshot,
+ thumbnail buffers (mThumbnailMemory), then registers them with the
+ snapshot channel via cam_config_prepare_buf().
+ Offsets/lengths come from dim->picture_frame_offset and
+ dim->thumb_frame_offset as returned by the dimension set_parm.
+ Returns NO_ERROR, BAD_VALUE, NO_MEMORY, or FAILED_TRANSACTION;
+ handleError() runs on any failure, and heaps allocated earlier in
+ the function are released on later failures.
+ Fix(review): restored "&reg_buf" at four call sites where
+ HTML-entity mangling had corrupted the text; dropped the unused
+ local "frame". */
+status_t QCameraStream_Snapshot::
+initSnapshotBuffers(cam_ctrl_dimension_t *dim, int num_of_buf)
+{
+ status_t ret = NO_ERROR;
+ uint32_t frame_len, y_off, cbcr_off;
+ uint8_t num_planes;
+ uint32_t planes[VIDEO_MAX_PLANES];
+ mm_camera_reg_buf_t reg_buf;
+ int rotation = 0;
+
+ ALOGV("%s: E", __func__);
+ memset(&reg_buf, 0, sizeof(mm_camera_reg_buf_t));
+ memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+
+ if ((num_of_buf == 0) || (num_of_buf > MM_CAMERA_MAX_NUM_FRAMES)) {
+ ALOGE("%s: Invalid number of buffers (=%d) requested!",
+ __func__, num_of_buf);
+ ret = BAD_VALUE;
+ goto end;
+ }
+
+ ALOGV("%s: Mode: %d Num_of_buf: %d ImageSizes: main: %dx%d thumb: %dx%d",
+ __func__, myMode, num_of_buf,
+ dim->picture_width, dim->picture_height,
+ dim->ui_thumbnail_width, dim->ui_thumbnail_height);
+
+ reg_buf.snapshot.main.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+ if (!reg_buf.snapshot.main.buf.mp) {
+ ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+ ret = NO_MEMORY;
+ goto end;
+ }
+ memset(reg_buf.snapshot.main.buf.mp, 0,
+ num_of_buf * sizeof(mm_camera_mp_buf_t));
+ if (!isFullSizeLiveshot()) {
+ reg_buf.snapshot.thumbnail.buf.mp = new mm_camera_mp_buf_t[num_of_buf];
+ if (!reg_buf.snapshot.thumbnail.buf.mp) {
+ ALOGE("%s Error allocating memory for mplanar struct ", __func__);
+ ret = NO_MEMORY;
+ goto end;
+ }
+ memset(reg_buf.snapshot.thumbnail.buf.mp, 0,
+ num_of_buf * sizeof(mm_camera_mp_buf_t));
+ }
+ /* Number of buffers to be set*/
+ /* Set the JPEG Rotation here since get_buffer_offset needs
+ * the value of rotation.*/
+ mHalCamCtrl->setJpegRotation(isZSLMode());
+ if(!isZSLMode())
+ rotation = mHalCamCtrl->getJpegRotation();
+ else
+ rotation = 0;
+ /* Re-push dimensions when rotation changed so the backend
+ recomputes picture_frame_offset/thumb_frame_offset. */
+ if(rotation != dim->rotation) {
+ dim->rotation = rotation;
+ ret = cam_config_set_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, dim);
+ }
+
+ if(isLiveSnapshot()) {
+ ret = cam_config_set_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, dim);
+ }
+ num_planes = 2;
+ planes[0] = dim->picture_frame_offset.mp[0].len;
+ planes[1] = dim->picture_frame_offset.mp[1].len;
+ frame_len = dim->picture_frame_offset.frame_len;
+ y_off = dim->picture_frame_offset.mp[0].offset;
+ cbcr_off = dim->picture_frame_offset.mp[1].offset;
+ ALOGV("%s: main image: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+ __func__, dim->rotation, y_off, cbcr_off, frame_len, dim->picture_width, dim->picture_height);
+ if (mHalCamCtrl->initHeapMem (&mHalCamCtrl->mJpegMemory, 1, frame_len, 0, cbcr_off,
+ MSM_PMEM_MAX, NULL, NULL, num_planes, planes) < 0) {
+ ALOGE("%s: Error allocating JPEG memory", __func__);
+ ret = NO_MEMORY;
+ goto end;
+ }
+ if(!isLiveSnapshot()) {
+ if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mSnapshotMemory, num_of_buf,
+ frame_len, y_off, cbcr_off, MSM_PMEM_MAINIMG, &mSnapshotStreamBuf,
+ &reg_buf.snapshot.main, num_planes, planes) < 0) {
+ ret = NO_MEMORY;
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+ goto end;
+ };
+ num_planes = 2;
+ planes[0] = dim->thumb_frame_offset.mp[0].len;
+ planes[1] = dim->thumb_frame_offset.mp[1].len;
+ frame_len = planes[0] + planes[1];
+ if (!isFullSizeLiveshot()) {
+ y_off = dim->thumb_frame_offset.mp[0].offset;
+ cbcr_off = dim->thumb_frame_offset.mp[1].offset;
+ ALOGV("%s: thumbnail: rotation = %d, yoff = %d, cbcroff = %d, size = %d, width = %d, height = %d",
+ __func__, dim->rotation, y_off, cbcr_off, frame_len,
+ dim->thumbnail_width, dim->thumbnail_height);
+
+ if (mHalCamCtrl->initHeapMem(&mHalCamCtrl->mThumbnailMemory, num_of_buf,
+ frame_len, y_off, cbcr_off, MSM_PMEM_THUMBNAIL, &mPostviewStreamBuf,
+ &reg_buf.snapshot.thumbnail, num_planes, planes) < 0) {
+ ret = NO_MEMORY;
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+ goto end;
+ }
+ }
+ /* register the streaming buffers for the channel*/
+ reg_buf.ch_type = MM_CAMERA_CH_SNAPSHOT;
+ reg_buf.snapshot.main.num = mSnapshotStreamBuf.num;
+
+ if (!isFullSizeLiveshot())
+ reg_buf.snapshot.thumbnail.num = mPostviewStreamBuf.num;
+ else
+ reg_buf.snapshot.thumbnail.num = 0;
+
+ ret = cam_config_prepare_buf(mCameraId, &reg_buf);
+ if(ret != NO_ERROR) {
+ ALOGE("%s:reg snapshot buf err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ if (!isFullSizeLiveshot()){
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mThumbnailMemory);
+ }
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+ goto end;
+ }
+ }
+
+ /* If we have reached here successfully, we have allocated buffer.
+ Set state machine.*/
+ setSnapshotState(SNAPSHOT_STATE_BUF_INITIALIZED);
+end:
+ /* The mp arrays are only needed for registration; free them on both
+ success and failure paths. */
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ if (reg_buf.snapshot.main.buf.mp)
+ delete []reg_buf.snapshot.main.buf.mp;
+ if (reg_buf.snapshot.thumbnail.buf.mp)
+ delete []reg_buf.snapshot.thumbnail.buf.mp;
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Release snapshot buffers allocated by initSnapshotBuffers(), gated
+ on state (>= BUF_INITIALIZED, or error cleanup). For non-live
+ snapshots the channel buffers are unregistered and the main (and,
+ unless full-size liveshot, thumbnail) heaps are released; the JPEG
+ heap is released in all cases. Returns NO_ERROR or
+ FAILED_TRANSACTION if unprepare fails (heaps then NOT released). */
+status_t QCameraStream_Snapshot::
+deinitSnapshotBuffers(void)
+{
+ int ret = NO_ERROR;
+ ALOGV("%s: E", __func__);
+
+ int err = getSnapshotState();
+ /* Deinit only if we have already initialized*/
+ if (err >= SNAPSHOT_STATE_BUF_INITIALIZED || err == SNAPSHOT_STATE_ERROR){
+
+ if(!isLiveSnapshot()) {
+ ALOGV("%s: Unpreparing Snapshot Buffer", __func__);
+ ret = cam_config_unprepare_buf(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+ if(ret != NO_ERROR) {
+ ALOGE("%s:unreg snapshot buf err=%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ }
+
+ /* Clear main and thumbnail heap*/
+ if(!isLiveSnapshot()) {
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mSnapshotMemory);
+ if (!isFullSizeLiveshot())
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mThumbnailMemory);
+ }
+ mHalCamCtrl->releaseHeapMem(&mHalCamCtrl->mJpegMemory);
+ }
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Top-level buffer teardown for the snapshot stream. No-op if already
+ uninitialized. For RAW captures, releases raw buffers; otherwise
+ first unmaps main/thumbnail fds from the backend socket (needed when
+ HDR/wavelet-denoise or ZSL had mapped them), then releases snapshot
+ buffers. Finally clears the JPEG heap, stream-buf bookkeeping and
+ queues, and resets the state machine to UNINIT. */
+void QCameraStream_Snapshot::deInitBuffer(void)
+{
+ /* NOTE(review): ch_type is unused in this function. */
+ mm_camera_channel_type_t ch_type;
+
+ ALOGV("%s: E", __func__);
+
+ if( getSnapshotState() == SNAPSHOT_STATE_UNINIT) {
+ ALOGV("%s: Already deinit'd!", __func__);
+ return;
+ }
+
+ if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ /* deinit buffer */
+ deinitRawSnapshotBuffers();
+ }
+ else
+ {
+ if ((!isZSLMode() && !isLiveSnapshot()) &&
+ ((mHalCamCtrl->getHDRMode() == HDR_MODE) || (mHalCamCtrl->isWDenoiseEnabled()))) {
+ /*unmap main and thumbnail buffers at back-end for frameproc*/
+ for (int i = 0; i < mHalCamCtrl->mSnapshotMemory.buffer_count; i++) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, i, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ ALOGE("%s: unmapping Main image Buffer for HDR and Denoise", __func__);
+ }
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, i, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ ALOGE("%s: unmapping Thumbnail buffer for HDR and Denoise", __func__);
+ }
+ }
+ }
+ if (isZSLMode()) {
+ /*unmap main image buffers at back-end for frameproc*/
+ for (int i = 0; i < mHalCamCtrl->mSnapshotMemory.buffer_count; i++) {
+ if (NO_ERROR != mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, i, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING)) {
+ ALOGE("%s: unmapping Main image Buffer for ZSL", __func__);
+ }
+ }
+ }
+
+ deinitSnapshotBuffers();
+ }
+
+
+ /* deinit jpeg buffer if allocated */
+ if(mJpegHeap != NULL) mJpegHeap.clear();
+ mJpegHeap = NULL;
+
+ /* memset some global structure */
+ memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+ memset(&mPostviewStreamBuf, 0, sizeof(mPostviewStreamBuf));
+ mSnapshotQueue.flush();
+ mWDNQueue.flush();
+
+ setSnapshotState(SNAPSHOT_STATE_UNINIT);
+
+ ALOGV("%s: X", __func__);
+}
+
+/* Temp: to be removed once event handling is enabled in mm-camera.
+ We need an event - one event for
+ stream-off to disable OPS_SNAPSHOT.
+ Body of the detached helper thread spawned by takePictureRaw/JPEG:
+ for RAW captures it blocks until the snapshot-done signal, then
+ stops the polling thread. The data argument is unused. */
+void QCameraStream_Snapshot::runSnapshotThread(void *data)
+{
+ ALOGV("%s: E", __func__);
+
+ if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ /* TBD: Temp: Needs to be removed once event handling is enabled.
+ We cannot call mm-camera interface to stop snapshot from callback
+ function as it causes deadlock. Hence handling it here temporarily
+ in this thread. Later mm-camera intf will give us event in separate
+ thread context */
+ mm_app_snapshot_wait();
+ /* Send command to stop snapshot polling thread*/
+ stop();
+ }
+ ALOGV("%s: X", __func__);
+}
+
+/* Temp: to be removed once event handling is enabled in mm-camera.
+ pthread entry point: forwards to runSnapshotThread() on the
+ QCameraStream_Snapshot instance passed as the thread argument. */
+static void *snapshot_thread(void *obj)
+{
+ QCameraStream_Snapshot *pme = (QCameraStream_Snapshot *)obj;
+ ALOGV("%s: E", __func__);
+ if (pme != 0) {
+ pme->runSnapshotThread(obj);
+ }
+ else ALOGW("not starting snapshot thread: the object went away!");
+ ALOGV("%s: X", __func__);
+ return NULL;
+}
+
+/* Temp: to be removed later. Handle of the detached helper thread;
+ file-scope because it is shared by the takePicture* entry points. */
+static pthread_t mSnapshotThread;
+
+/* Prepare a (non-ZSL) JPEG snapshot: read the current dimensions,
+ switch the backend to capture op-mode, configure the snapshot
+ dimensions/format, allocate buffers, and map main/thumbnail buffer
+ fds to the backend socket. Skipped entirely for full-size liveshot.
+ Returns NO_ERROR or FAILED_TRANSACTION; handleError() runs on any
+ failure. */
+status_t QCameraStream_Snapshot::initJPEGSnapshot(int num_of_snapshots)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dim;
+ mm_camera_op_mode_type_t op_mode;
+
+ ALOGV("%s: E", __func__);
+
+ if (isFullSizeLiveshot())
+ goto end;
+
+ ALOGV("%s: Get current dimension", __func__);
+ /* Query mm_camera to get current dimension */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_DIMENSION, &dim);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't get preview dimension!", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* Set camera op mode to MM_CAMERA_OP_MODE_CAPTURE */
+ ALOGV("Setting OP_MODE_CAPTURE");
+ op_mode = MM_CAMERA_OP_MODE_CAPTURE;
+ if( NO_ERROR != cam_config_set_parm(mCameraId,
+ MM_CAMERA_PARM_OP_MODE, &op_mode)) {
+ ALOGE("%s: MM_CAMERA_OP_MODE_CAPTURE failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* config the parameters and see if we need to re-init the stream*/
+ ALOGV("%s: Configure Snapshot Dimension", __func__);
+ ret = configSnapshotDimension(&dim);
+ if (ret != NO_ERROR) {
+ ALOGE("%s: Setting snapshot dimension failed", __func__);
+ goto end;
+ }
+
+ /* Initialize stream - set format, acquire channel */
+ ret = initSnapshotFormat(&dim);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't init nonZSL stream!", __func__);
+ goto end;
+ }
+
+ ret = initSnapshotBuffers(&dim, num_of_snapshots);
+ if ( NO_ERROR != ret ){
+ ALOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+ goto end;
+ }
+
+ {
+ /*map main and thumbnail buffer fds to the back-end for frameproc*/
+ for (int i = 0; i < num_of_snapshots; i++) {
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, i,
+ mSnapshotStreamBuf.frame[i].fd, mHalCamCtrl->mSnapshotMemory.size, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ ALOGE("%s: sending mapping data Msg Failed", __func__);
+ }
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, i,
+ mPostviewStreamBuf.frame[i].fd, mHalCamCtrl->mThumbnailMemory.size, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ ALOGE("%s: sending mapping data Msg Failed", __func__);
+ }
+ }
+ }
+
+end:
+ /* Based on what state we are in, we'll need to handle error -
+ like deallocating memory if we have already allocated */
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ ALOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Prepare a RAW capture: switch the backend to capture op-mode, query
+ the sensor-dependent raw dimensions, configure the raw channel, and
+ allocate/register raw buffers. On success the state machine moves to
+ SNAPSHOT_STATE_INITIALIZED; handleError() runs on any failure. */
+status_t QCameraStream_Snapshot::initRawSnapshot(int num_of_snapshots)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dim;
+ /* NOTE(review): initSnapshot is unused in this function. */
+ bool initSnapshot = false;
+ mm_camera_op_mode_type_t op_mode;
+
+ ALOGV("%s: E", __func__);
+
+ /* Set camera op mode to MM_CAMERA_OP_MODE_CAPTURE */
+ ALOGV("%s: Setting OP_MODE_CAPTURE", __func__);
+ op_mode = MM_CAMERA_OP_MODE_CAPTURE;
+ if( NO_ERROR != cam_config_set_parm(mCameraId,
+ MM_CAMERA_PARM_OP_MODE, &op_mode)) {
+ ALOGE("%s: MM_CAMERA_OP_MODE_CAPTURE failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* For raw snapshot, we do not know the dimension as it
+ depends on sensor to sensor. We call getDimension which will
+ give us raw width and height */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ /* NOTE(review): compares against MM_CAMERA_OK here while other
+ paths in this file compare against NO_ERROR — confirm the two
+ success codes share value 0. */
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: error - can't get dimension!", __func__);
+ ALOGV("%s: X", __func__);
+ goto end;
+ }
+ ALOGV("%s: Raw Snapshot dimension: %dx%d", __func__,
+ dim.raw_picture_width,
+ dim.raw_picture_height);
+
+
+ ret = initRawSnapshotChannel(&dim, num_of_snapshots);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't init nonZSL stream!", __func__);
+ goto end;
+ }
+
+ ret = initRawSnapshotBuffers(&dim, num_of_snapshots);
+ if ( NO_ERROR != ret ){
+ ALOGE("%s: Failure allocating memory for Raw Snapshot buffers",
+ __func__);
+ goto end;
+ }
+ setSnapshotState(SNAPSHOT_STATE_INITIALIZED);
+
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Prepare a full-size liveshot (full-resolution capture during video
+ recording): reconcile the requested picture size with the current
+ backend dimensions, derive the thumbnail from the main image, then
+ configure the snapshot format and allocate a single buffer set.
+ Returns NO_ERROR on success or the failing call's status. */
+status_t QCameraStream_Snapshot::initFullLiveshot(void)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dim;
+ bool matching = true;
+
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: error - can't get dimension!", __func__);
+ return ret;
+ }
+#if 1
+ /* First check if the picture resolution is the same, if not, change it*/
+ mHalCamCtrl->getPictureSize(&mPictureWidth, &mPictureHeight);
+ ALOGV("%s: Picture size received: %d x %d", __func__,
+ mPictureWidth, mPictureHeight);
+
+ //Use main image as input to encoder to generate thumbnail
+ mThumbnailWidth = dim.picture_width;
+ mThumbnailHeight = dim.picture_height;
+ matching = (mPictureWidth == dim.picture_width) &&
+ (mPictureHeight == dim.picture_height);
+
+ //Actual thumbnail size requested
+ mPostviewWidth = mHalCamCtrl->mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ mPostviewHeight = mHalCamCtrl->mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+
+ /* A 0x0 request means the app wants no thumbnail; substitute the
+ defaults for encoder config but mark it for dropping. */
+ mDropThumbnail = false;
+ if (mPostviewWidth == 0 && mPostviewHeight == 0) {
+ mPostviewWidth = THUMBNAIL_DEFAULT_WIDTH;
+ mPostviewHeight = THUMBNAIL_DEFAULT_HEIGHT;
+ mDropThumbnail = true;
+ }
+
+ if (!matching) {
+ dim.picture_width = mPictureWidth;
+ dim.picture_height = mPictureHeight;
+ dim.ui_thumbnail_height = mThumbnailHeight;
+ dim.ui_thumbnail_width = mThumbnailWidth;
+ }
+ ALOGV("%s: Picture size to set: %d x %d", __func__,
+ dim.picture_width, dim.picture_height);
+ /* NOTE(review): the return of this set_parm is overwritten by the
+ next call without being checked. */
+ ret = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_DIMENSION,&dim);
+#endif
+ /* Initialize stream - set format, acquire channel */
+ ret = initSnapshotFormat(&dim);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't init nonZSL stream!", __func__);
+ return ret;
+ }
+ ret = initSnapshotBuffers(&dim, 1);
+ if ( NO_ERROR != ret ){
+ ALOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+ return ret;
+ }
+
+ return ret;
+}
+
+/* Prepare a ZSL capture: read current dimensions, configure snapshot
+ dimensions/format, allocate queue-depth + 3 buffers (ZSL internal
+ queue plus lower-layer working buffers), and map each main-image fd
+ to the backend socket. Returns NO_ERROR or FAILED_TRANSACTION;
+ handleError() runs on any failure. */
+status_t QCameraStream_Snapshot::initZSLSnapshot(void)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dim;
+ /* NOTE(review): op_mode is unused here — unlike initJPEGSnapshot,
+ no capture op-mode switch is performed for ZSL. */
+ mm_camera_op_mode_type_t op_mode;
+
+ ALOGV("%s: E", __func__);
+
+ ALOGV("%s: Get current dimension", __func__);
+ /* Query mm_camera to get current dimension */
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId,
+ MM_CAMERA_PARM_DIMENSION, &dim);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't get preview dimension!", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* config the parameters and see if we need to re-init the stream*/
+ ALOGV("%s: Configure Snapshot Dimension", __func__);
+ ret = configSnapshotDimension(&dim);
+ if (ret != NO_ERROR) {
+ ALOGE("%s: Setting snapshot dimension failed", __func__);
+ goto end;
+ }
+
+ /* Initialize stream - set format, acquire channel */
+ ret = initSnapshotFormat(&dim);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't init nonZSL stream!", __func__);
+ goto end;
+ }
+
+ /* For ZSL we'll have to allocate buffers for internal queue
+ maintained by mm-camera lib plus around 3 buffers used for
+ data handling by lower layer.*/
+
+ ret = initSnapshotBuffers(&dim, mHalCamCtrl->getZSLQueueDepth() + 3);
+ if ( NO_ERROR != ret ){
+ ALOGE("%s: Failure allocating memory for Snapshot buffers", __func__);
+ goto end;
+ }
+ /*map main image buffer fds to the back-end for frameproc*/
+ for (int i = 0; i < mHalCamCtrl->getZSLQueueDepth() + 3; i++) {
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, i,
+ mSnapshotStreamBuf.frame[i].fd, mHalCamCtrl->mSnapshotMemory.size, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ ALOGE("%s: sending mapping data Msg Failed", __func__);
+ }
+ }
+
+end:
+ /* Based on what state we are in, we'll need to handle error -
+ like deallocating memory if we have already allocated */
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ ALOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Trigger a JPEG snapshot via MM_CAMERA_OPS_SNAPSHOT, resetting HDR
+ frame counters first when HDR is active, then spawn the detached
+ helper thread that waits for completion (temporary until mm-camera
+ event callbacks exist). Returns NO_ERROR or FAILED_TRANSACTION;
+ handleError() runs on failure. */
+status_t QCameraStream_Snapshot::
+takePictureJPEG(void)
+{
+ status_t ret = NO_ERROR;
+
+ ALOGV("%s: E", __func__);
+
+ if (mHalCamCtrl->mHdrMode == HDR_MODE) {
+ hdrRawCount = 0;
+ hdrJpegCount = 0;
+ }
+ /* Take snapshot */
+ ALOGV("%s: Call MM_CAMERA_OPS_SNAPSHOT", __func__);
+ if (NO_ERROR != cam_ops_action(mCameraId,
+ true,
+ MM_CAMERA_OPS_SNAPSHOT,
+ this)) {
+ ALOGE("%s: Failure taking snapshot", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* TBD: Temp: to be removed once event callback
+ is implemented in mm-camera lib */
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+ pthread_create(&mSnapshotThread,&attr,
+ snapshot_thread, (void *)this);
+
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+
+ ALOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Trigger a RAW capture via MM_CAMERA_OPS_RAW, then spawn the
+ detached helper thread that waits for the snapshot-done signal and
+ stops polling (temporary until mm-camera event callbacks exist).
+ Returns NO_ERROR or FAILED_TRANSACTION; handleError() runs on
+ failure. */
+status_t QCameraStream_Snapshot::
+takePictureRaw(void)
+{
+ status_t ret = NO_ERROR;
+
+ ALOGV("%s: E", __func__);
+
+ /* Take snapshot */
+ ALOGV("%s: Call MM_CAMERA_OPS_SNAPSHOT", __func__);
+ if (NO_ERROR != cam_ops_action(mCameraId,
+ true,
+ MM_CAMERA_OPS_RAW,
+ this)) {
+ ALOGE("%s: Failure taking snapshot", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* TBD: Temp: to be removed once event callback
+ is implemented in mm-camera lib */
+ /* Wait for snapshot frame callback to return*/
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+ pthread_create(&mSnapshotThread,&attr,
+ snapshot_thread, (void *)this);
+
+end:
+ if (ret != NO_ERROR) {
+ handleError();
+ }
+ ALOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Perform a live snapshot from a received video/preview frame:
+ duplicates the frame descriptor, remaps it onto the snapshot
+ main/thumbnail slots (source depends on the number of VFE outputs),
+ derives capture dimensions from the video stream, fires the shutter
+ callbacks, and hands the frame to the JPEG encoder. RAW_IMAGE
+ data/notify callbacks are dispatched after encoder config succeeds.
+ Returns NO_ERROR or FAILED_TRANSACTION.
+ Fix(review): free the malloc'd frame copy on the dimension-query
+ error path (it was leaked); repaired the liveshot-resolution log
+ format string, whose "%d % %d" tail consumed only three of the four
+ arguments, and its "Thumabnail" typo. */
+status_t QCameraStream_Snapshot::
+takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame)
+{
+ status_t ret = NO_ERROR;
+ int mJpegMaxSize;
+ int mNuberOfVFEOutputs = 0;
+ common_crop_t crop_info;
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb;
+ cam_ctrl_dimension_t dim;
+
+ mStopCallbackLock.lock();
+ if (!mHalCamCtrl->mStateLiveshot) {
+ ALOGE("%s: Picture Cancelled", __func__);
+ mStopCallbackLock.unlock();
+ return FAILED_TRANSACTION;
+ }
+
+ ret = cam_config_get_parm(mHalCamCtrl->mCameraId,MM_CAMERA_PARM_VFE_OUTPUT_ENABLE,
+ &mNuberOfVFEOutputs);
+ if (ret != MM_CAMERA_OK) {
+ ALOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE failed");
+ cam_evt_buf_done(mHalCamCtrl->mCameraId, recvd_frame);
+ mStopCallbackLock.unlock();
+ return FAILED_TRANSACTION;
+ }
+
+ /* Copy the frame descriptor; the copy is consumed by encodeData(). */
+ mm_camera_ch_data_buf_t* frame =
+ (mm_camera_ch_data_buf_t *)malloc(sizeof(mm_camera_ch_data_buf_t));
+ if (frame == NULL) {
+ ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+ cam_evt_buf_done(mHalCamCtrl->mCameraId, recvd_frame);
+ mStopCallbackLock.unlock();
+ return FAILED_TRANSACTION;
+ }
+ memcpy(frame, recvd_frame, sizeof(mm_camera_ch_data_buf_t));
+
+ /* With a single VFE output the preview buffer doubles as the
+ snapshot source; otherwise use the video buffer. */
+ if (mNuberOfVFEOutputs == 1){
+ ALOGV("<DEBUG> Liveshot buffer idx:%d",frame->def.idx);
+ frame->snapshot.main.frame = frame->def.frame;
+ frame->snapshot.main.idx = frame->def.idx;
+ frame->snapshot.thumbnail.frame = frame->def.frame;
+ frame->snapshot.thumbnail.idx = frame->def.idx;
+ } else {
+ ALOGV("<DEBUG> Liveshot buffer idx:%d",frame->video.video.idx);
+ frame->snapshot.main.frame = frame->video.video.frame;
+ frame->snapshot.main.idx = frame->video.video.idx;
+ frame->snapshot.thumbnail.frame = frame->video.video.frame;
+ frame->snapshot.thumbnail.idx = frame->video.video.idx;
+ }
+
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != ret) {
+ ALOGE("%s: X error - can't get dimension!", __func__);
+ cam_evt_buf_done(mHalCamCtrl->mCameraId, recvd_frame);
+ free(frame); /* fix: was leaked on this path */
+ setModeLiveSnapshot(false);
+ mStopCallbackLock.unlock();
+ return FAILED_TRANSACTION;
+ }
+ /* Picture/thumbnail dimensions track the video stream. */
+ dim.picture_width = dim.video_width;
+ dim.picture_height = dim.video_height;
+ dim.ui_thumbnail_width = dim.video_width;
+ dim.ui_thumbnail_height = dim.video_height;
+
+ if (mNuberOfVFEOutputs == 1){
+ dim.main_img_format = dim.prev_format;
+ dim.thumb_format = dim.prev_format;
+ } else {
+ dim.main_img_format = dim.enc_format;
+ dim.thumb_format = dim.enc_format;
+ }
+ initSnapshotBuffers(&dim,1);
+
+ /* set flag to indicate we are doing livesnapshot */
+ resetSnapshotCounters( );
+ setModeLiveSnapshot(true);
+
+ /* NOTE(review): crop_info is passed to notifyShutter() before the
+ memset below — confirm notifyShutter ignores its contents here. */
+ mStopCallbackLock.unlock();
+ if (mHalCamCtrl->mStateLiveshot) {
+ if(!mHalCamCtrl->mShutterSoundPlayed) {
+ notifyShutter(&crop_info, true);
+ }
+ notifyShutter(&crop_info, false);
+ mHalCamCtrl->mShutterSoundPlayed = false;
+ }
+ mStopCallbackLock.lock();
+
+ // send upperlayer callback for raw image (data or notify, not both)
+ if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+ notifyCb = mHalCamCtrl->mNotifyCb;
+ } else {
+ notifyCb = NULL;
+ }
+
+ mPictureWidth = dim.picture_width;
+ mPictureHeight = dim.picture_height;
+ mThumbnailWidth = dim.ui_thumbnail_width;
+ mThumbnailHeight = dim.ui_thumbnail_height;
+ mPictureFormat = dim.main_img_format;
+ mThumbnailFormat = dim.thumb_format;
+
+ /* 1.5 bytes/pixel upper bound (YUV420 size) for the JPEG output. */
+ mJpegMaxSize = mPictureWidth * mPictureHeight * 1.5;
+
+ ALOGV("Liveshot res = %d X %d, Thumbnail = %d X %d",
+ mPictureWidth,mPictureHeight,mThumbnailWidth,mThumbnailHeight);
+
+ memset(&crop_info, 0, sizeof(common_crop_t));
+ crop_info.in1_w = mPictureWidth;
+ crop_info.in1_h = mPictureHeight;
+ /* For low power live snapshot the thumbnail output size is set to default size.
+ In case of live snapshot video buffer = thumbnail buffer. For higher resolutions
+ the thumnail will be dropped if its more than 64KB. To avoid thumbnail drop
+ set thumbnail as configured by application. This will be a size lower than video size*/
+ mDropThumbnail = false;
+ if(mHalCamCtrl->thumbnailWidth == 0 && mHalCamCtrl->thumbnailHeight == 0) {
+ ALOGE("Live Snapshot thumbnail will be dropped as indicated by application");
+ mDropThumbnail = true;
+ }
+ crop_info.out1_w = mHalCamCtrl->thumbnailWidth;
+ crop_info.out1_h = mHalCamCtrl->thumbnailHeight;
+ ret = encodeData(frame, &crop_info, mJpegMaxSize, 0);
+ if (ret != NO_ERROR) {
+ ALOGE("%s: Failure configuring JPEG encoder", __func__);
+ setModeLiveSnapshot(false);
+ mStopCallbackLock.unlock();
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ mStopCallbackLock.unlock();
+ if (dataCb && mHalCamCtrl->mStateLiveshot) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mRecordingMemory.camera_memory[frame->video.video.idx],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb && mHalCamCtrl->mStateLiveshot) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Request buffered (zero-shutter-lag) frames from the snapshot
+ channel via MM_CAMERA_OPS_GET_BUFFERED_FRAME. The number of frames
+ delivered is refreshed from the HAL controller first. Returns
+ NO_ERROR or FAILED_TRANSACTION.
+ Fix(review): restored "&param" at two call sites where HTML-entity
+ mangling had turned the text into a pilcrow character. */
+status_t QCameraStream_Snapshot::
+takePictureZSL(void)
+{
+ status_t ret = NO_ERROR;
+ mm_camera_ops_parm_get_buffered_frame_t param;
+
+ ALOGV("%s: E", __func__);
+
+ memset(&param, 0, sizeof(param));
+ param.ch_type = MM_CAMERA_CH_SNAPSHOT;
+
+ /* Take snapshot */
+ ALOGV("%s: Call MM_CAMERA_OPS_GET_BUFFERED_FRAME", __func__);
+
+ mNumOfSnapshot = mHalCamCtrl->getNumOfSnapshots();
+ // it's just for stabilization for dequeuing zsl buffer frame
+ // 13.2 ~ 22.4fps
+ usleep(80000);
+ if (NO_ERROR != cam_ops_action(mCameraId,
+ true,
+ MM_CAMERA_OPS_GET_BUFFERED_FRAME,
+ &param)) {
+ ALOGE("%s: Failure getting zsl frame(s)", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* TBD: Temp: to be removed once event callback
+ is implemented in mm-camera lib */
+/* pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+ pthread_create(&mSnapshotThread,&attr,
+ snapshot_thread, (void *)this);
+*/
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Start the ZSL stream: the backend begins continuously queuing
+ frames into the snapshot channel's internal buffer queue. Returns
+ NO_ERROR or FAILED_TRANSACTION. */
+status_t QCameraStream_Snapshot::
+startStreamZSL(void)
+{
+ status_t ret = NO_ERROR;
+
+ ALOGV("%s: E", __func__);
+
+ /* Start ZSL - it'll start queuing the frames */
+ ALOGV("%s: Call MM_CAMERA_OPS_ZSL", __func__);
+ if (NO_ERROR != cam_ops_action(mCameraId,
+ true,
+ MM_CAMERA_OPS_ZSL,
+ this)) {
+ ALOGE("%s: Failure starting ZSL stream", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+
+}
+
+/* Configure and kick off JPEG encoding for one received snapshot frame.
+ * Three paths, chosen from encoder state and burst bookkeeping:
+ *  1) encoder busy, or queue non-empty with a brand-new frame -> just
+ *     enqueue the frame on mSnapshotQueue for later;
+ *  2) encoder idle but this is not the first frame of the burst ->
+ *     feed buffers to the already-configured encoder (omxJpegEncodeNext);
+ *  3) first frame -> full setup (dimensions, crop, exif, callbacks,
+ *     omxJpegStart) then omxJpegEncode().
+ * NOTE(review): the crop_info and frame_len parameters are not read in
+ * this function - crop data comes from mCrop instead; confirm before
+ * relying on them. Presumably called with mStopCallbackLock held by the
+ * caller - verify against encodeDisplayAndSave(). */
+status_t QCameraStream_Snapshot::
+encodeData(mm_camera_ch_data_buf_t* recvd_frame,
+ common_crop_t *crop_info,
+ int frame_len,
+ bool enqueued)
+{
+ status_t ret = NO_ERROR;
+ cam_ctrl_dimension_t dimension;
+ struct msm_frame *postviewframe;
+ struct msm_frame *mainframe;
+ common_crop_t crop;
+ cam_point_t main_crop_offset;
+ cam_point_t thumb_crop_offset;
+ int width, height;
+ /* NOTE(review): thumbnail_buf / thumbnail_fd appear unused below. */
+ uint8_t *thumbnail_buf;
+ uint32_t thumbnail_fd;
+ uint8_t hw_encode = true;
+ int mNuberOfVFEOutputs = 0;
+
+ omx_jpeg_encode_params encode_params;
+
+ /* If it's the only frame, we directly pass to encoder.
+ If not, we'll queue it and check during next jpeg .
+ Also, if the queue isn't empty then we need to queue this
+ one too till its turn comes (only if it's not already
+ queued up there)*/
+ ALOGV("%s: getSnapshotState()=%d, enqueued =%d, Q empty=%d", __func__, getSnapshotState(), enqueued, mSnapshotQueue.isEmpty());
+ ALOGV("%s: mNumOfRecievedJPEG=%d, mNumOfSnapshot =%d", __func__, mNumOfRecievedJPEG, mNumOfSnapshot);
+ /* A 0x0 thumbnail request from the app means "no thumbnail". */
+ if(mHalCamCtrl->thumbnailWidth == 0 || mHalCamCtrl->thumbnailHeight == 0) {
+ ALOGV("Snapshot thumbnail will be dropped as indicated by application");
+ mDropThumbnail = true;
+ }
+ if((getSnapshotState() == SNAPSHOT_STATE_JPEG_ENCODING) ||
+ (!mSnapshotQueue.isEmpty() && !enqueued)){ /*busy and new buffer*/
+ /* encoding is going on. Just queue the frame for now.*/
+ ALOGV("%s: JPEG encoding in progress."
+ "Enqueuing frame id(%d) for later processing.", __func__,
+ recvd_frame->snapshot.main.idx);
+ mSnapshotQueue.enqueue((void *)recvd_frame);
+ } else if (enqueued ||
+ (mNumOfRecievedJPEG != mNumOfSnapshot && mNumOfRecievedJPEG != 0)) { /*not busy, not first*/
+ ALOGV("%s: JPG not busy, not first frame.", __func__);
+
+ // For full-size live shot, use mainimage to generate thumbnail
+ if (isFullSizeLiveshot()) {
+ postviewframe = recvd_frame->snapshot.main.frame;
+ } else {
+ postviewframe = recvd_frame->snapshot.thumbnail.frame;
+ }
+ mainframe = recvd_frame->snapshot.main.frame;
+ cam_config_get_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, &dimension);
+ ALOGV("%s: main_fmt =%d, tb_fmt =%d", __func__, dimension.main_img_format, dimension.thumb_format);
+ /*since this is the continue job, we only care about the input buffer*/
+ encode_params.thumbnail_buf = (uint8_t *)postviewframe->buffer;
+ encode_params.thumbnail_fd = postviewframe->fd;
+ encode_params.snapshot_buf = (uint8_t *)mainframe->buffer;
+ encode_params.snapshot_fd = mainframe->fd;
+ encode_params.dimension = &dimension;
+ /*update exif parameters in HAL*/
+ mHalCamCtrl->setExifTags();
+
+ encode_params.exif_data = mHalCamCtrl->getExifData();
+ encode_params.exif_numEntries = mHalCamCtrl->getExifTableNumEntries();
+ if (!omxJpegEncodeNext(&encode_params)){
+ ALOGE("%s: Failure! JPEG encoder returned error.", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ /* Save the pointer to the frame sent for encoding. we'll need it to
+ tell kernel that we are done with the frame.*/
+ mCurrentFrameEncoded = recvd_frame;
+ setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODING);
+ } else { /*not busy and new buffer (first job)*/
+
+ ALOGV("%s: JPG Idle and first frame.", __func__);
+
+ // For full-size live shot, use mainimage to generate thumbnail
+ if (isFullSizeLiveshot()){
+ postviewframe = recvd_frame->snapshot.main.frame;
+ } else {
+ postviewframe = recvd_frame->snapshot.thumbnail.frame;
+ }
+ mainframe = recvd_frame->snapshot.main.frame;
+ cam_config_get_parm(mHalCamCtrl->mCameraId, MM_CAMERA_PARM_DIMENSION, &dimension);
+ ALOGV("%s: main_fmt =%d, tb_fmt =%d", __func__, dimension.main_img_format, dimension.thumb_format);
+
+ dimension.orig_picture_dx = mPictureWidth;
+ dimension.orig_picture_dy = mPictureHeight;
+
+ /* Thumbnail input size: preview size in ZSL, otherwise the
+ configured thumbnail size; 0x0 when the thumbnail is dropped. */
+ if(!mDropThumbnail) {
+ if(isZSLMode()) {
+ ALOGV("Setting input thumbnail size to previewWidth= %d previewheight= %d in ZSL mode",
+ mHalCamCtrl->mPreviewWidth, mHalCamCtrl->mPreviewHeight);
+ dimension.thumbnail_width = width = mHalCamCtrl->mPreviewWidth;
+ dimension.thumbnail_height = height = mHalCamCtrl->mPreviewHeight;
+ } else {
+ dimension.thumbnail_width = width = mThumbnailWidth;
+ dimension.thumbnail_height = height = mThumbnailHeight;
+ }
+ } else {
+ dimension.thumbnail_width = width = 0;
+ dimension.thumbnail_height = height = 0;
+ }
+ dimension.main_img_format = mPictureFormat;
+ dimension.thumb_format = mThumbnailFormat;
+
+ /*TBD: Move JPEG handling to the mm-camera library */
+ ALOGV("Setting callbacks, initializing encoder and start encoding.");
+ ALOGV(" Passing my obj: %x", (unsigned int) this);
+ set_callbacks(snapshot_jpeg_fragment_cb, snapshot_jpeg_cb, this,
+ mHalCamCtrl->mJpegMemory.camera_memory[0]->data, &mJpegOffset);
+
+ if (isLiveSnapshot() || isFullSizeLiveshot()) {
+ /* determine the target type */
+ ret = cam_config_get_parm(mCameraId,MM_CAMERA_PARM_VFE_OUTPUT_ENABLE,
+ &mNuberOfVFEOutputs);
+ if (ret != MM_CAMERA_OK) {
+ ALOGE("get parm MM_CAMERA_PARM_VFE_OUTPUT_ENABLE failed");
+ ret = BAD_VALUE;
+ }
+ /* VFE 2x has hardware limitation:
+ * It can't support concurrent
+ * video encoding and jpeg encoding
+ * So switch to software for liveshot
+ */
+ if (mNuberOfVFEOutputs == 1)
+ hw_encode = false;
+ }
+ ALOGV("%s: hw_encode: %d\n",__func__, hw_encode);
+
+ if(omxJpegStart(hw_encode) != NO_ERROR){
+ ALOGE("Error In omxJpegStart!!! Return");
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* Fall back to quality 85 when the app did not set one. */
+ if (mHalCamCtrl->getJpegQuality())
+ mm_jpeg_encoder_setMainImageQuality(mHalCamCtrl->getJpegQuality());
+ else
+ mm_jpeg_encoder_setMainImageQuality(85);
+
+ ALOGV("%s: Dimension to encode: main: %dx%d thumbnail: %dx%d", __func__,
+ dimension.orig_picture_dx, dimension.orig_picture_dy,
+ dimension.thumbnail_width, dimension.thumbnail_height);
+
+ /*TBD: Pass 0 as cropinfo for now as v4l2 doesn't provide
+ cropinfo. It'll be changed later.*/
+ memset(&crop,0,sizeof(common_crop_t));
+ memset(&main_crop_offset,0,sizeof(cam_point_t));
+ memset(&thumb_crop_offset,0,sizeof(cam_point_t));
+
+ /* Setting crop info */
+
+ /*Main image*/
+ crop.in2_w=mCrop.snapshot.main_crop.width;// dimension.picture_width
+ crop.in2_h=mCrop.snapshot.main_crop.height;// dimension.picture_height;
+ if (!mJpegDownscaling) {
+ crop.out2_w = mPictureWidth;
+ crop.out2_h = mPictureHeight;
+ } else {
+ crop.out2_w = mActualPictureWidth;
+ crop.out2_h = mActualPictureHeight;
+ if (!crop.in2_w || !crop.in2_h) {
+ crop.in2_w = mPictureWidth;
+ crop.in2_h = mPictureHeight;
+ }
+ }
+ main_crop_offset.x=mCrop.snapshot.main_crop.left;
+ main_crop_offset.y=mCrop.snapshot.main_crop.top;
+ /*Thumbnail image*/
+ crop.in1_w=mCrop.snapshot.thumbnail_crop.width; //dimension.thumbnail_width;
+ crop.in1_h=mCrop.snapshot.thumbnail_crop.height; // dimension.thumbnail_height;
+ if(isLiveSnapshot() || isFullSizeLiveshot() || isZSLMode()) {
+ crop.out1_w= mHalCamCtrl->thumbnailWidth;
+ crop.out1_h= mHalCamCtrl->thumbnailHeight;
+ ALOGV("Thumbnail width= %d height= %d for livesnapshot", crop.out1_w, crop.out1_h);
+ } else {
+ crop.out1_w = width;
+ crop.out1_h = height;
+ }
+ thumb_crop_offset.x=mCrop.snapshot.thumbnail_crop.left;
+ thumb_crop_offset.y=mCrop.snapshot.thumbnail_crop.top;
+
+ /* Clamp: thumbnail output must not exceed main image output. */
+ if (crop.out1_w > crop.out2_w || crop.out1_h > crop.out2_h) {
+ crop.out1_w = crop.out2_w;
+ crop.out1_h = crop.out2_h;
+ }
+ //update exif parameters in HAL
+ mHalCamCtrl->initExifData();
+
+ /*Fill in the encode parameters*/
+ encode_params.dimension = (const cam_ctrl_dimension_t *)&dimension;
+ //if (!isFullSizeLiveshot()) {
+ encode_params.thumbnail_buf = (uint8_t *)postviewframe->buffer;
+ encode_params.thumbnail_fd = postviewframe->fd;
+ encode_params.thumbnail_offset = postviewframe->phy_offset;
+ encode_params.thumb_crop_offset = &thumb_crop_offset;
+ //}
+ encode_params.snapshot_buf = (uint8_t *)mainframe->buffer;
+ encode_params.snapshot_fd = mainframe->fd;
+ encode_params.snapshot_offset = mainframe->phy_offset;
+ encode_params.scaling_params = &crop;
+ encode_params.exif_data = mHalCamCtrl->getExifData();
+ encode_params.exif_numEntries = mHalCamCtrl->getExifTableNumEntries();
+
+ if (isLiveSnapshot() && !isFullSizeLiveshot())
+ encode_params.a_cbcroffset = mainframe->cbcr_off;
+ else
+ encode_params.a_cbcroffset = -1;
+ encode_params.main_crop_offset = &main_crop_offset;
+
+ if (mDropThumbnail)
+ encode_params.hasThumbnail = 0;
+ else
+ encode_params.hasThumbnail = 1;
+ encode_params.thumb_crop_offset = &thumb_crop_offset;
+ encode_params.main_format = dimension.main_img_format;
+ encode_params.thumbnail_format = dimension.thumb_format;
+
+ if (!omxJpegEncode(&encode_params)){
+ ALOGE("%s: Failure! JPEG encoder returned error.", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ /* Save the pointer to the frame sent for encoding. we'll need it to
+ tell kernel that we are done with the frame.*/
+ mCurrentFrameEncoded = recvd_frame;
+ setSnapshotState(SNAPSHOT_STATE_JPEG_ENCODING);
+ }
+
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Shutter notification to the upper layer. Invoked twice per capture:
+ * once with mPlayShutterSoundOnly=true (play the sound) and once with
+ * false (configure overlay/surfaceflinger for postview). Skipped when
+ * the stream is inactive, unless this is a live snapshot. */
+void QCameraStream_Snapshot::notifyShutter(common_crop_t *crop,
+ bool mPlayShutterSoundOnly)
+{
+ ALOGV("%s: E", __func__);
+ if (!mActive && !isLiveSnapshot()) {
+ ALOGE("__debbug: Snapshot thread stopped \n");
+ return;
+ }
+ if (mHalCamCtrl->mNotifyCb) {
+ mHalCamCtrl->mNotifyCb(CAMERA_MSG_SHUTTER, 0, mPlayShutterSoundOnly,
+ mHalCamCtrl->mCallbackCookie);
+ }
+ ALOGV("%s: X", __func__);
+}
+
+/* Prepare a received frame for JPEG encoding and hand it to encodeData().
+ * When built with USE_ION, first write-back (clean+invalidate) the main
+ * and thumbnail ION buffers so the JPEG hardware sees coherent data.
+ * Returns encodeData()'s status; NO_ERROR if capture was already
+ * cancelled. NOTE(review): buf_index and offset_addr are unused here. */
+status_t QCameraStream_Snapshot::
+encodeDisplayAndSave(mm_camera_ch_data_buf_t* recvd_frame,
+ bool enqueued)
+{
+ status_t ret = NO_ERROR;
+ struct msm_frame *postview_frame;
+ struct ion_flush_data cache_inv_data;
+ int ion_fd;
+ int buf_index = 0;
+ ssize_t offset_addr = 0;
+ common_crop_t dummy_crop;
+ /* send frame for encoding */
+ ALOGV("%s: Send frame for encoding", __func__);
+ /*TBD: Pass 0 as cropinfo for now as v4l2 doesn't provide
+ cropinfo. It'll be changed later.*/
+ if(!mActive) {
+ ALOGE("Cancel Picture.. Stop is called");
+ return NO_ERROR;
+ }
+ if(isZSLMode()){
+ ALOGV("%s: set JPEG rotation in ZSL mode", __func__);
+ mHalCamCtrl->setJpegRotation(isZSLMode());
+ }
+#ifdef USE_ION
+ /*Clean out(Write-back) cache before sending for JPEG*/
+ memset(&cache_inv_data, 0, sizeof(struct ion_flush_data));
+ cache_inv_data.vaddr = (void*)recvd_frame->snapshot.main.frame->buffer;
+ cache_inv_data.fd = recvd_frame->snapshot.main.frame->fd;
+ cache_inv_data.handle = recvd_frame->snapshot.main.frame->fd_data.handle;
+ cache_inv_data.length = recvd_frame->snapshot.main.frame->ion_alloc.len;
+ ion_fd = recvd_frame->snapshot.main.frame->ion_dev_fd;
+ if(ion_fd > 0) {
+ if(mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_INV_CACHES) < 0)
+ ALOGE("%s: Cache Invalidate failed\n", __func__);
+ else {
+ ALOGV("%s: Successful cache invalidate\n", __func__);
+ /* Thumbnail buffer exists only for non-full-size liveshot. */
+ if(!isFullSizeLiveshot()) {
+ ion_fd = recvd_frame->snapshot.thumbnail.frame->ion_dev_fd;
+ cache_inv_data.vaddr = (void*)recvd_frame->snapshot.thumbnail.frame->buffer;
+ cache_inv_data.fd = recvd_frame->snapshot.thumbnail.frame->fd;
+ cache_inv_data.handle = recvd_frame->snapshot.thumbnail.frame->fd_data.handle;
+ cache_inv_data.length = recvd_frame->snapshot.thumbnail.frame->ion_alloc.len;
+ if(mHalCamCtrl->cache_ops(ion_fd, &cache_inv_data, ION_IOC_CLEAN_INV_CACHES) < 0)
+ ALOGE("%s: Cache Invalidate failed\n", __func__);
+ else
+ ALOGV("%s: Successful cache invalidate\n", __func__);
+ }
+ }
+ }
+#endif
+ memset(&dummy_crop,0,sizeof(common_crop_t));
+ ret = encodeData(recvd_frame, &dummy_crop, mSnapshotStreamBuf.frame_len,
+ enqueued);
+ if (ret != NO_ERROR) {
+ ALOGE("%s: Failure configuring JPEG encoder", __func__);
+
+ goto end;
+ }
+
+ /* Display postview image*/
+ /* If it's burst mode, we won't be displaying postview of all the captured
+ images - only the first one */
+ ALOGV("%s: Burst mode flag %d", __func__, mBurstModeFlag);
+
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Entry point for a raw (YUV or Bayer) frame arriving from the lower
+ * layer. For PICTURE_FORMAT_RAW the buffer is forwarded directly to the
+ * app as CAMERA_MSG_COMPRESSED_IMAGE. Otherwise the frame descriptor is
+ * copied to the heap (the lower layer's copy is stack-local), optionally
+ * run through Morpho DNR in low light, then routed to wavelet denoise,
+ * HDR accumulation, or straight to JPEG encoding. mStopCallbackLock is
+ * taken/released by hand around upper-layer callbacks to avoid holding
+ * it while calling out. */
+status_t QCameraStream_Snapshot::receiveRawPicture(mm_camera_ch_data_buf_t* recvd_frame)
+{
+ int buf_index = 0;
+ common_crop_t crop;
+ int rc = NO_ERROR;
+ int cur_lux_idx = 0;
+
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb, jpgDataCb;
+
+ ALOGV("%s: E ", __func__);
+ mStopCallbackLock.lock( );
+ if(!mActive) {
+ mStopCallbackLock.unlock();
+ ALOGV("%s: Stop receiving raw pic ", __func__);
+ return NO_ERROR;
+ }
+
+ /* In the error state just hand the buffer back to the kernel. */
+ if(getSnapshotState() == SNAPSHOT_STATE_ERROR) {
+ cam_evt_buf_done(mCameraId, recvd_frame);
+ }
+
+ mHalCamCtrl->dumpFrameToFile(recvd_frame->snapshot.main.frame, HAL_DUMP_FRM_MAIN);
+ if (!isFullSizeLiveshot())
+ mHalCamCtrl->dumpFrameToFile(recvd_frame->snapshot.thumbnail.frame,
+ HAL_DUMP_FRM_THUMBNAIL);
+
+ /* If it's raw snapshot, we just want to tell upperlayer to save the image*/
+ if(mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ ALOGV("%s: Call notifyShutter 2nd time in case of RAW", __func__);
+ mStopCallbackLock.unlock();
+ if(!mHalCamCtrl->mShutterSoundPlayed) {
+ notifyShutter(&crop, true);
+ }
+ notifyShutter(&crop, false);
+ mHalCamCtrl->mShutterSoundPlayed = false;
+
+ mStopCallbackLock.lock( );
+ ALOGV("%s: Sending Raw Snapshot Callback to Upperlayer", __func__);
+ buf_index = recvd_frame->def.idx;
+
+ /* Snapshot the callback pointer under the lock, invoke it after
+ unlocking. */
+ if (mHalCamCtrl->mDataCb && mActive &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ mStopCallbackLock.unlock();
+
+ if(dataCb) {
+ dataCb(
+ CAMERA_MSG_COMPRESSED_IMAGE,
+ mHalCamCtrl->mRawMemory.camera_memory[buf_index], 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+ /* TBD: Temp: To be removed once event handling is enabled */
+ mm_app_snapshot_done();
+ } else {
+ /*TBD: v4l2 doesn't have support to provide cropinfo along with
+ frame. We'll need to query.*/
+ memset(&crop, 0, sizeof(common_crop_t));
+
+ if(isZSLMode()){
+ //Changes to stop sending Preview Frames when Snapshot issued
+ //in ZSL case.
+ mHalCamCtrl->mPauseFramedispatch = true;
+ }
+
+ ALOGV("%s: Call notifyShutter 2nd time", __func__);
+ /* The recvd_frame structre we receive from lower library is a local
+ variable. So we'll need to save this structure so that we won't
+ be later pointing to garbage data when that variable goes out of
+ scope */
+ mm_camera_ch_data_buf_t* frame =
+ (mm_camera_ch_data_buf_t *)malloc(sizeof(mm_camera_ch_data_buf_t));
+ if (frame == NULL) {
+ ALOGE("%s: Error allocating memory to save received_frame structure.", __func__);
+ cam_evt_buf_done(mCameraId, recvd_frame);
+ mStopCallbackLock.unlock();
+ return BAD_VALUE;
+ }
+ memcpy(frame, recvd_frame, sizeof(mm_camera_ch_data_buf_t));
+ rc = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_LUX_IDX, &cur_lux_idx);
+
+ /* Lux index above threshold -> presumably a low-light scene; run
+ the Morpho DNR noise-reduction pass in place on the main frame.
+ TODO(review): confirm the 370 threshold's meaning. */
+ if (cur_lux_idx > 370) {
+ static int input_width = 0, input_height = 0;
+ static int ret = 0;
+ unsigned char *dnr_buffer;
+ input_width = mPictureWidth;
+ input_height = mPictureHeight;
+
+ ALOGV("%s: received frame %d * %d", __func__, input_width, input_height);
+ {
+ /* NOTE(review): on malloc failure this only logs - the memcpy
+ below would then dereference NULL; needs a guard. */
+ dnr_buffer = (uint8_t *)malloc(input_width*input_height*3/2);
+ if (dnr_buffer == NULL)
+ ALOGE("dnr_buffer alloc fail");
+ ALOGV("dnr_buffer allocated size : %d", input_width*input_height*3/2);
+ memcpy(dnr_buffer, (uint8_t *)frame->snapshot.main.frame->buffer, input_width*input_height*3/2);
+ ALOGV("dnr_buffer memcpy completed.");
+ }
+ ALOGV("[DNR] DNR Processing Start.... %d * %d\n", mPictureWidth, mPictureHeight);
+ ret = NO_ERROR;
+ if (mHalCamCtrl->LINK_morpho_DNR_ProcessFrame)
+ ret = (int)mHalCamCtrl->LINK_morpho_DNR_ProcessFrame(dnr_buffer, mPictureWidth, mPictureHeight, 0, 1); //bright->normal->dark
+ ALOGV("[DNR] DNR Processing result.... ret = %d\n", ret);
+ memcpy((uint8_t *)recvd_frame->snapshot.main.frame->buffer, (uint8_t *)dnr_buffer, mPictureWidth*mPictureHeight*3/2);
+ ALOGV("[DNR] DNR Processing END....\n");
+ if(dnr_buffer)
+ free(dnr_buffer);
+ }
+ //mStopCallbackLock.lock();
+
+ // only in ZSL mode and Wavelet Denoise is enabled, we will send frame to deamon to do WDN
+ if (isZSLMode() && mHalCamCtrl->isWDenoiseEnabled()) {
+ if(mIsDoingWDN){
+ mWDNQueue.enqueue((void *)frame);
+ ALOGV("%s: Wavelet denoise is going on, queue frame", __func__);
+ rc = NO_ERROR;
+ } else {
+ ALOGV("%s: Start Wavelet denoise", __func__);
+ mIsDoingWDN = true; // set the falg to true because we are going to do WDN
+
+ // No WDN is going on so far, we will start it here
+ rc = doWaveletDenoise(frame);
+ if ( NO_ERROR != rc ) {
+ ALOGE("%s: Error while doing wavelet denoise", __func__);
+ mIsDoingWDN = false;
+ }
+ }
+ } else if (mHdrInfo.hdr_on) {
+ /* HDR: park frames until all exposures arrive, then process. */
+ mHdrInfo.recvd_frame[mHdrInfo.num_raw_received] = frame;
+ mHdrInfo.num_raw_received++;
+ ALOGV("%s Total %d Received %d frames, still need to receive %d frames", __func__,
+ mHdrInfo.num_frame, mHdrInfo.num_raw_received, (mHdrInfo.num_frame - mHdrInfo.num_raw_received));
+ if (mHdrInfo.num_raw_received == mHdrInfo.num_frame) {
+ ALOGV(" Received all %d YUV frames, Invoke HDR", mHdrInfo.num_raw_received);
+ doHdrProcessing();
+ }
+ }
+ else {
+ ALOGV("%s: encodeDisplayAndSave ", __func__);
+ rc = encodeDisplayAndSave(frame, 0);
+ }
+
+
+ // send upperlayer callback for raw image (data or notify, not both)
+ if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+ notifyCb = mHalCamCtrl->mNotifyCb;
+ } else {
+ notifyCb = NULL;
+ }
+
+ mStopCallbackLock.unlock();
+ if(!mHalCamCtrl->mShutterSoundPlayed) {
+ notifyShutter(&crop, true);
+ }
+ notifyShutter(&crop, false);
+ mHalCamCtrl->mShutterSoundPlayed = false;
+
+ /* NOTE(review): this early return skips the rc error handling and
+ raw-image callbacks below; `frame` is not freed on this path -
+ verify ownership (HDR path may keep it in mHdrInfo). */
+ if(mHalCamCtrl->mHdrMode == HDR_MODE) {
+ if ((hdrRawCount % 3) != 2)
+ return NO_ERROR;
+ else
+ hdrRawCount++;
+ }
+
+ if (rc != NO_ERROR)
+ {
+ ALOGE("%s: Error while encoding/displaying/saving image", __func__);
+ cam_evt_buf_done(mCameraId, recvd_frame);
+
+ if(mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ /* get picture failed. Give jpeg callback with NULL data
+ * to the application to restore to preview mode
+ */
+ jpgDataCb = mHalCamCtrl->mDataCb;
+ } else {
+ jpgDataCb = NULL;
+ }
+ ALOGE("%s: encode err so data cb", __func__);
+ //mStopCallbackLock.unlock();
+ if (dataCb) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+ if (jpgDataCb) {
+ jpgDataCb(CAMERA_MSG_COMPRESSED_IMAGE,
+ NULL, 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+
+ if (frame != NULL) {
+ free(frame);
+ }
+ } else {
+
+ //mStopCallbackLock.unlock();
+ if (dataCb) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+
+ if (!isZSLMode() &&
+ (!isLiveSnapshot() && !isFullSizeLiveshot())) {
+ if(mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_POSTVIEW_FRAME)) {
+ mHalCamCtrl->mDataCb(CAMERA_MSG_POSTVIEW_FRAME,mHalCamCtrl->mThumbnailMemory.camera_memory[0],
+ 0, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ }
+ }
+ }
+
+ ALOGV("%s: X", __func__);
+ return NO_ERROR;
+}
+
+//-------------------------------------------------------------------
+// Helper Functions
+//-------------------------------------------------------------------
+/* Tear down partially-initialized snapshot resources according to how
+ * far the state machine had progressed, then park the state at
+ * SNAPSHOT_STATE_ERROR. The switch deliberately falls through so each
+ * later state also performs the cleanup of every earlier state. */
+void QCameraStream_Snapshot::handleError()
+{
+ mm_camera_channel_type_t ch_type;
+ ALOGV("%s: E", __func__);
+
+ /* Depending upon the state we'll have to
+ handle error */
+ switch(getSnapshotState()) {
+ case SNAPSHOT_STATE_JPEG_ENCODING:
+ if(mJpegHeap != NULL) mJpegHeap.clear();
+ mJpegHeap = NULL;
+
+ /* fall through */
+ case SNAPSHOT_STATE_YUV_RECVD:
+ case SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD:
+ stopPolling();
+ /* fall through */
+ case SNAPSHOT_STATE_INITIALIZED:
+ case SNAPSHOT_STATE_BUF_INITIALIZED:
+ if (mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+ deinitSnapshotBuffers();
+ }else
+ {
+ deinitRawSnapshotBuffers();
+ }
+ /* fall through */
+ case SNAPSHOT_STATE_BUF_NOTIF_REGD:
+ case SNAPSHOT_STATE_CH_ACQUIRED:
+ if (mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+ deinitSnapshotChannel(MM_CAMERA_CH_SNAPSHOT);
+ }else
+ {
+ deinitSnapshotChannel(MM_CAMERA_CH_RAW);
+ }
+ /* fall through */
+ default:
+ /* Set the state to ERROR */
+ setSnapshotState(SNAPSHOT_STATE_ERROR);
+ break;
+ }
+
+ ALOGV("%s: X", __func__);
+}
+
+/* Advance the snapshot state machine to the given state. */
+void QCameraStream_Snapshot::setSnapshotState(int state)
+{
+ ALOGV("%s: Setting snapshot state to: %d", __func__, state);
+ mSnapshotState = state;
+}
+
+/* Current value of the snapshot state machine. */
+int QCameraStream_Snapshot::getSnapshotState(void)
+{
+ return mSnapshotState;
+}
+
+/* Flag whether the stream is operating as a (video) live snapshot. */
+void QCameraStream_Snapshot::setModeLiveSnapshot(bool value)
+{
+ mModeLiveSnapshot = value;
+}
+
+/* True when the stream is operating as a (video) live snapshot. */
+bool QCameraStream_Snapshot::isLiveSnapshot(void)
+{
+ return mModeLiveSnapshot;
+}
+/* True when the camera mode bitmask includes ZSL. */
+bool QCameraStream_Snapshot::isZSLMode()
+{
+ return (myMode & CAMERA_ZSL_MODE) != 0;
+}
+
+/* Flag whether a full-sensor-size liveshot is in progress. */
+void QCameraStream_Snapshot::setFullSizeLiveshot(bool value)
+{
+ mFullLiveshot = value;
+}
+
+/* True when a full-sensor-size liveshot is in progress. */
+bool QCameraStream_Snapshot::isFullSizeLiveshot()
+{
+ return mFullLiveshot;
+}
+
+/* Re-arm the burst bookkeeping: fetch the requested capture count from
+ * the HAL (clamped to a minimum of one) and clear the received-JPEG
+ * counter. */
+void QCameraStream_Snapshot::resetSnapshotCounters(void )
+{
+ int requested = mHalCamCtrl->getNumOfSnapshots();
+ mNumOfSnapshot = (requested > 0) ? requested : 1;
+ mNumOfRecievedJPEG = 0;
+ ALOGV("%s: Number of images to be captured: %d", __func__, mNumOfSnapshot);
+}
+
+//------------------------------------------------------------------
+// Constructor and Destructor
+//------------------------------------------------------------------
+/* Construct a snapshot stream: seed defaults (single capture, JPEG
+ * output, NV21 main/thumbnail formats, all sizes zeroed), initialize the
+ * pending-snapshot and wavelet-denoise queues, zero the per-buffer
+ * bookkeeping arrays, and open an OMX JPEG session (mJpegSessionId). */
+QCameraStream_Snapshot::
+QCameraStream_Snapshot(int cameraId, camera_mode_t mode)
+ : QCameraStream(cameraId,mode),
+ mSnapshotFormat(PICTURE_FORMAT_JPEG),
+ mPictureWidth(0), mPictureHeight(0),
+ mPictureFormat(CAMERA_YUV_420_NV21),
+ mPostviewWidth(0), mPostviewHeight(0),
+ mThumbnailWidth(0), mThumbnailHeight(0),
+ mThumbnailFormat(CAMERA_YUV_420_NV21),
+ mJpegOffset(0),
+ mSnapshotState(SNAPSHOT_STATE_UNINIT),
+ mNumOfSnapshot(1),
+ mModeLiveSnapshot(false),
+ mBurstModeFlag(false),
+ mActualPictureWidth(0),
+ mActualPictureHeight(0),
+ mJpegDownscaling(false),
+ mJpegHeap(NULL),
+ mDisplayHeap(NULL),
+ mPostviewHeap(NULL),
+ mCurrentFrameEncoded(NULL),
+ mJpegSessionId(0),
+ mFullLiveshot(false),
+ mDropThumbnail(false)
+ {
+ ALOGV("%s: E", __func__);
+
+ /*initialize snapshot queue*/
+ mSnapshotQueue.init();
+ memset (&mHdrInfo, 0, sizeof(snap_hdr_record_t ));
+
+ /*initialize WDN queue*/
+ mWDNQueue.init();
+ mIsDoingWDN = false;
+
+ memset(&mSnapshotStreamBuf, 0, sizeof(mSnapshotStreamBuf));
+ memset(&mPostviewStreamBuf, 0, sizeof(mPostviewStreamBuf));
+ mSnapshotBufferNum = 0;
+ mMainSize = 0;
+ mThumbSize = 0;
+ for(int i = 0; i < mMaxSnapshotBufferCount; i++) {
+ mMainfd[i] = 0;
+ mThumbfd[i] = 0;
+ mCameraMemoryPtrMain[i] = NULL;
+ mCameraMemoryPtrThumb[i] = NULL;
+ }
+ /*load the jpeg lib*/
+ mJpegSessionId = omxJpegOpen( );
+ ALOGV("%s: X", __func__);
+ }
+
+
+/* Destroy the snapshot stream: drain both queues, stop/release the
+ * stream if it is still active or initialized, and close the OMX JPEG
+ * session opened in the constructor. */
+QCameraStream_Snapshot::~QCameraStream_Snapshot() {
+ ALOGV("%s: E", __func__);
+
+ /* deinit snapshot queue */
+ if (mSnapshotQueue.isInitialized()) {
+ mSnapshotQueue.deinit();
+ }
+ /* deinit wavelet-denoise queue */
+ if (mWDNQueue.isInitialized()) {
+ mWDNQueue.deinit();
+ }
+
+ if(mActive) {
+ stop();
+ }
+ if(mInit) {
+ release();
+ }
+ mInit = false;
+ mActive = false;
+ /* Close the JPEG session only if omxJpegOpen() succeeded. */
+ if (mJpegSessionId > 0) {
+ omxJpegClose( );
+ mJpegSessionId = 0;
+ }
+ ALOGV("%s: X", __func__);
+
+}
+
+//------------------------------------------------------------------
+// Public Members
+//------------------------------------------------------------------
+/* One-time stream initialization. Refuses to re-init while a snapshot
+ * is already in progress; in ZSL mode a repeated init is tolerated and
+ * reports NO_ERROR. */
+status_t QCameraStream_Snapshot::init()
+{
+ status_t ret = NO_ERROR;
+ mm_camera_op_mode_type_t op_mode;
+
+ ALOGV("%s: E", __func__);
+ if (getSnapshotState() != SNAPSHOT_STATE_UNINIT) {
+ ret = isZSLMode() ? NO_ERROR : INVALID_OPERATION;
+ ALOGE("%s: Trying to take picture while snapshot is in progress",
+ __func__);
+ } else {
+ mInit = true;
+ }
+
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+/* Begin a capture. Sequence: acquire the RAW or SNAPSHOT channel and
+ * register the frame-notify callback; then either (a) ZSL - prepare the
+ * hardware and start the continuously-queuing ZSL stream, or (b) init
+ * full-size liveshot / raw / JPEG buffers and issue the one-shot
+ * take-picture op. On any failure, deInitBuffer() unwinds; on success
+ * the state machine moves to SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD and
+ * mActive is set. Runs entirely under mStopCallbackLock. */
+status_t QCameraStream_Snapshot::start(void) {
+ status_t ret = NO_ERROR;
+
+ ALOGV("%s: E", __func__);
+
+ Mutex::Autolock lock(mStopCallbackLock);
+
+ /* Keep track of number of snapshots to take - in case of
+ multiple snapshot/burst mode */
+
+ if(mHalCamCtrl->isRawSnapshot()) {
+ ALOGV("%s: Acquire Raw Snapshot Channel", __func__);
+ ret = cam_ops_ch_acquire(mCameraId, MM_CAMERA_CH_RAW);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: Failure Acquiring Raw Snapshot Channel error =%d\n",
+ __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ /* Snapshot channel is acquired */
+ setSnapshotState(SNAPSHOT_STATE_CH_ACQUIRED);
+ ALOGV("%s: Register buffer notification. My object: %x",
+ __func__, (unsigned int) this);
+ (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_RAW,
+ snapshot_notify_cb,
+ MM_CAMERA_REG_BUF_CB_INFINITE,
+ 0,
+ this);
+ /* Set the state to buffer notification completed */
+ setSnapshotState(SNAPSHOT_STATE_BUF_NOTIF_REGD);
+ }else{
+ ALOGV("%s: Acquire Snapshot Channel", __func__);
+ ret = cam_ops_ch_acquire(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: Failure Acquiring Snapshot Channel error =%d\n", __func__, ret);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+ /* Snapshot channel is acquired */
+ setSnapshotState(SNAPSHOT_STATE_CH_ACQUIRED);
+ ALOGV("%s: Register buffer notification. My object: %x",
+ __func__, (unsigned int) this);
+ (void) cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_SNAPSHOT,
+ snapshot_notify_cb,
+ MM_CAMERA_REG_BUF_CB_INFINITE,
+ 0,
+ this);
+ /* Set the state to buffer notification completed */
+ setSnapshotState(SNAPSHOT_STATE_BUF_NOTIF_REGD);
+ }
+
+ if (isZSLMode()) {
+ prepareHardware();
+ ret = initZSLSnapshot();
+ if(ret != NO_ERROR) {
+ ALOGE("%s : Error while Initializing ZSL snapshot",__func__);
+ goto end;
+ }
+ mHalCamCtrl->setExifTags();
+ /* In case of ZSL, start will only start snapshot stream and
+ continuously queue the frames in a queue. When user clicks
+ shutter we'll call get buffer from the queue and pass it on */
+ ret = startStreamZSL();
+ goto end;
+ }
+
+ if (isFullSizeLiveshot())
+ ret = initFullLiveshot();
+
+ /* Check if it's a raw snapshot or JPEG*/
+ if(mHalCamCtrl->isRawSnapshot()) {
+ mSnapshotFormat = PICTURE_FORMAT_RAW;
+ ret = initRawSnapshot(mNumOfSnapshot);
+ }else{
+ //JPEG
+ mSnapshotFormat = PICTURE_FORMAT_JPEG;
+ /* HDR needs buffers for every bracketed exposure, not just the
+ requested snapshot count. */
+ if (mHdrInfo.hdr_on) {
+ ret = initJPEGSnapshot(mHdrInfo.num_frame);
+ } else {
+ ret = initJPEGSnapshot(mNumOfSnapshot);
+ }
+ }
+ if(ret != NO_ERROR) {
+ ALOGE("%s : Error while Initializing snapshot",__func__);
+ goto end;
+ }
+
+ //Update Exiftag values.
+ mHalCamCtrl->setExifTags();
+
+ if (mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ ret = takePictureRaw();
+ goto end;
+ }
+ else{
+ ret = takePictureJPEG();
+ goto end;
+ }
+
+end:
+ if (ret == NO_ERROR) {
+ setSnapshotState(SNAPSHOT_STATE_IMAGE_CAPTURE_STRTD);
+ mActive = true;
+ } else {
+ /* Unwind whatever was initialized before the failure. */
+ deInitBuffer();
+ }
+
+ ALOGV("%s: X", __func__);
+ return ret;
+ }
+
+/* Ask the lower layer to stop the in-flight capture operation that
+ * matches the current picture format and mode (ZSL, JPEG snapshot, or
+ * raw). A failure is only logged. */
+void QCameraStream_Snapshot::stopPolling(void)
+{
+ mm_camera_ops_type_t ops_type;
+
+ if (mSnapshotFormat != PICTURE_FORMAT_JPEG) {
+ ops_type = MM_CAMERA_OPS_RAW;
+ } else {
+ ops_type = isZSLMode() ? MM_CAMERA_OPS_ZSL : MM_CAMERA_OPS_SNAPSHOT;
+ }
+
+ if (cam_ops_action(mCameraId, false, ops_type, this) != NO_ERROR) {
+ ALOGE("%s: Failure stopping snapshot", __func__);
+ }
+}
+
+/* Stop the capture and tear down channel state. Liveshot gets a short
+ * path: abort any in-flight JPEG, free buffers, clear the liveshot flag.
+ * Otherwise: stop polling, abort JPEG if encoding (dropping the lock
+ * around omxJpegAbort - presumably so the jpeg callback can take it;
+ * verify), free buffers, deinit and deregister the RAW or SNAPSHOT
+ * channel, and wait for the encoder thread via omxJpegFinish(). */
+void QCameraStream_Snapshot::stop(void)
+{
+ mm_camera_ops_type_t ops_type;
+ status_t ret = NO_ERROR;
+
+ ALOGV("%s: E", __func__);
+
+ if(isLiveSnapshot() && mHalCamCtrl->mStateLiveshot) {
+ if(getSnapshotState() == SNAPSHOT_STATE_JPEG_ENCODING) {
+ ALOGV("Destroy Liveshot Jpeg Instance");
+ omxJpegAbort();
+ }
+ mStopCallbackLock.lock();
+ deInitBuffer();
+ mHalCamCtrl->mStateLiveshot = false;
+ mStopCallbackLock.unlock();
+ return;
+ }
+
+ if(!mActive) {
+ ALOGV("%s: Not Active return now", __func__);
+ return;
+ }
+ mActive = false;
+ mStopCallbackLock.lock();
+ if (getSnapshotState() != SNAPSHOT_STATE_UNINIT) {
+ /* Stop polling for further frames */
+ stopPolling();
+
+ if(getSnapshotState() == SNAPSHOT_STATE_JPEG_ENCODING) {
+ mStopCallbackLock.unlock();
+ ALOGV("Destroy Jpeg Instance");
+ omxJpegAbort();
+ mStopCallbackLock.lock();
+ }
+ /* Depending upon current state, we'll need to allocate-deallocate-deinit*/
+ deInitBuffer();
+ }
+
+ if(mSnapshotFormat == PICTURE_FORMAT_RAW) {
+ ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_RAW);
+ if(ret != MM_CAMERA_OK) {
+ ALOGE("%s:Deinit RAW channel failed=%d\n", __func__, ret);
+ }
+ /* Deregister the frame-notify callback installed by start(). */
+ (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_RAW,
+ NULL,
+ (mm_camera_register_buf_cb_type_t)NULL,
+ NULL,
+ NULL);
+ } else {
+ ret= QCameraStream::deinitChannel(mCameraId, MM_CAMERA_CH_SNAPSHOT);
+ if(ret != MM_CAMERA_OK) {
+ ALOGE("%s:Deinit Snapshot channel failed=%d\n", __func__, ret);
+ }
+ (void)cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_SNAPSHOT,
+ NULL,
+ (mm_camera_register_buf_cb_type_t)NULL,
+ NULL,
+ NULL);
+ }
+
+ /* release is generally called in case of explicit call from
+ upper-layer during disconnect. So we need to deinit everything
+ whatever state we are in */
+ ALOGV("Calling omxjpegjoin from release\n");
+ omxJpegFinish();
+#if 0
+ omxJpegClose();
+#endif
+ mFullLiveshot = false;
+ mStopCallbackLock.unlock();
+ ALOGV("%s: X", __func__);
+
+}
+
+/* Release the stream on explicit disconnect from the upper layer:
+ * stop the capture if still active, then mark the stream
+ * uninitialized. No-op (with an error log) if init() never ran. */
+void QCameraStream_Snapshot::release()
+{
+ status_t ret = NO_ERROR;
+ ALOGV("%s: E", __func__);
+
+ if (!mInit) {
+ ALOGE("%s : Stream not Initalized",__func__);
+ return;
+ }
+
+ if (mActive) {
+ this->stop();
+ mActive = false;
+ }
+
+ mInit = false;
+ ALOGV("%s: X", __func__);
+}
+
+/* Issue MM_CAMERA_OPS_PREPARE_SNAPSHOT so the lower layer can ready the
+ * pipeline ahead of the actual capture. */
+void QCameraStream_Snapshot::prepareHardware()
+{
+ ALOGV("%s: E", __func__);
+
+ cam_ops_action(mCameraId, true, MM_CAMERA_OPS_PREPARE_SNAPSHOT, this);
+
+ ALOGV("%s: X", __func__);
+}
+
+/* Memory heap backing the display (postview) buffers, or NULL when no
+ * display heap has been allocated. */
+sp<IMemoryHeap> QCameraStream_Snapshot::getRawHeap() const
+{
+ if (mDisplayHeap != NULL) {
+ return mDisplayHeap->mHeap;
+ }
+ return NULL;
+}
+
+/* Factory: allocate a snapshot stream for the given camera and mode. */
+QCameraStream*
+QCameraStream_Snapshot::createInstance(int cameraId,
+ camera_mode_t mode)
+{
+ return new QCameraStream_Snapshot(cameraId, mode);
+}
+
+/* Release and destroy a stream created via createInstance(). Safe to
+ * call with NULL. */
+void QCameraStream_Snapshot::deleteInstance(QCameraStream *p)
+{
+ if (p){
+ p->release();
+ delete p;
+ /* The former "p = NULL;" was removed: p is passed by value, so
+ nulling it only changed the local copy and had no effect on the
+ caller's pointer. */
+ }
+}
+
+/* Callback from the daemon when a wavelet-denoise request completes.
+ * Sends the denoised frame (carried in `cookie`) to JPEG encoding,
+ * snapshots the raw-image/notify/jpeg callback pointers under the lock,
+ * launches the next queued WDN job, then fires callbacks on failure.
+ * NOTE(review): mStopCallbackLock is taken by Mutex::Autolock AND
+ * manually unlock()ed below - the Autolock destructor will unlock a
+ * second time on return; verify the Mutex tolerates double unlock. */
+void QCameraStream_Snapshot::notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie)
+{
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb, jpgDataCb;
+ int rc = NO_ERROR;
+ mm_camera_ch_data_buf_t *frame = (mm_camera_ch_data_buf_t *)cookie;
+
+ ALOGV("%s: WDN Done status (%d) received",__func__,status);
+ Mutex::Autolock lock(mStopCallbackLock);
+ if (frame == NULL) {
+ ALOGE("%s: cookie is returned NULL", __func__);
+ } else {
+ #if 0
+ // first unmapping the fds
+ mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN, frame->snapshot.main.idx, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING);
+ mHalCamCtrl->sendUnMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL, frame->snapshot.thumbnail.idx, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING);
+ #endif
+
+ // then do JPEG encoding
+ rc = encodeDisplayAndSave(frame, 0);
+ }
+
+ // send upperlayer callback for raw image (data or notify, not both)
+ if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+ notifyCb = mHalCamCtrl->mNotifyCb;
+ } else {
+ notifyCb = NULL;
+ }
+ if(mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ /* get picture failed. Give jpeg callback with NULL data
+ * to the application to restore to preview mode
+ */
+ jpgDataCb = mHalCamCtrl->mDataCb;
+ } else {
+ jpgDataCb = NULL;
+ }
+
+ // launch next WDN if there is more in WDN Queue
+ lauchNextWDenoiseFromQueue();
+
+ /* Drop the lock before calling out to the app (see NOTE above). */
+ mStopCallbackLock.unlock();
+
+ if (rc != NO_ERROR)
+ {
+ ALOGE("%s: Error while encoding/displaying/saving image", __func__);
+ if (frame) {
+ cam_evt_buf_done(mCameraId, frame);
+ }
+
+ if (dataCb) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+ if (jpgDataCb) {
+ jpgDataCb(CAMERA_MSG_COMPRESSED_IMAGE,
+ NULL, 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+
+ if (frame != NULL) {
+ free(frame);
+ }
+ }
+}
+
+void QCameraStream_Snapshot::lauchNextWDenoiseFromQueue()
+{
+ do {
+ mm_camera_ch_data_buf_t *frame = NULL;
+ if ( mWDNQueue.isEmpty() ||
+ (NULL == (frame = (mm_camera_ch_data_buf_t *)mWDNQueue.dequeue())) ) {
+ // set the flag back to false when no WDN going on
+ mIsDoingWDN = false;
+ break;
+ }
+
+ if ( NO_ERROR != doWaveletDenoise(frame) ) {
+ ALOGE("%s: Error while doing wavelet denoise", __func__);
+ if (frame != NULL) {
+ free(frame);
+ }
+ } else {
+ // we sent out req for WDN, so we can break here
+ ALOGV("%s: Send out req for doing wavelet denoise, return here", __func__);
+ break;
+ }
+ } while (true);
+}
+
+status_t QCameraStream_Snapshot::doWaveletDenoise(mm_camera_ch_data_buf_t* frame)
+{
+ status_t ret = NO_ERROR;
+ cam_sock_packet_t packet;
+ cam_ctrl_dimension_t dim;
+
+ ALOGV("%s: E", __func__);
+
+ #if 0
+ // get dim on the fly
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ ret = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (NO_ERROR != ret) {
+ ALOGE("%s: error - can't get dimension!", __func__);
+ return FAILED_TRANSACTION;
+ }
+
+ // send main frame mapping through domain socket
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_MAIN,
+ frame->snapshot.main.idx,
+ frame->snapshot.main.frame->fd,
+ dim.picture_frame_offset.frame_len, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ ALOGE("%s: sending main frame mapping buf msg Failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+ // send thumbnail frame mapping through domain socket
+ if (NO_ERROR != mHalCamCtrl->sendMappingBuf(MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL,
+ frame->snapshot.thumbnail.idx,
+ frame->snapshot.thumbnail.frame->fd,
+ dim.display_frame_offset.frame_len, mCameraId,
+ CAM_SOCK_MSG_TYPE_FD_MAPPING)) {
+ ALOGE("%s: sending thumbnail frame mapping buf msg Failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+#endif
+ // ask daemon to start wdn operation
+ if (NO_ERROR != sendWDenoiseStartMsg(frame)) {
+ ALOGE("%s: sending thumbnail frame mapping buf msg Failed", __func__);
+ ret = FAILED_TRANSACTION;
+ goto end;
+ }
+
+end:
+ ALOGV("%s: X", __func__);
+ return ret;
+}
+
+status_t QCameraStream_Snapshot::sendWDenoiseStartMsg(mm_camera_ch_data_buf_t * frame)
+{
+ cam_sock_packet_t packet;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_SOCK_MSG_TYPE_WDN_START;
+ packet.payload.wdn_start.cookie = (unsigned long)frame;
+ packet.payload.wdn_start.num_frames = MM_MAX_WDN_NUM;
+ packet.payload.wdn_start.ext_mode[0] = MSM_V4L2_EXT_CAPTURE_MODE_MAIN;
+ packet.payload.wdn_start.ext_mode[1] = MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL;
+ packet.payload.wdn_start.frame_idx[0] = frame->snapshot.main.idx;
+ packet.payload.wdn_start.frame_idx[1] = frame->snapshot.thumbnail.idx;
+ if ( cam_ops_sendmsg(mCameraId, &packet, sizeof(packet), 0) <= 0 ) {
+ ALOGE("%s: sending start wavelet denoise msg failed", __func__);
+ return FAILED_TRANSACTION;
+ }
+ return NO_ERROR;
+}
+
+status_t QCameraStream_Snapshot::doHdrProcessing( )
+{
+ status_t rc = NO_ERROR;
+ cam_sock_packet_t packet;
+ int i;
+ memset(&packet, 0, sizeof(cam_sock_packet_t));
+ packet.msg_type = CAM_SOCK_MSG_TYPE_HDR_START;
+ packet.payload.hdr_pkg.cookie = (unsigned long)this;
+ packet.payload.hdr_pkg.num_hdr_frames = mHdrInfo.num_frame;
+ ALOGV("%s num frames = %d ", __func__, mHdrInfo.num_frame);
+ for (i = 0; i < mHdrInfo.num_frame; i++) {
+ packet.payload.hdr_pkg.hdr_main_idx[i] =mHdrInfo.recvd_frame[i]->snapshot.main.idx;
+ packet.payload.hdr_pkg.hdr_thm_idx[i] = mHdrInfo.recvd_frame[i]->snapshot.thumbnail.idx;
+ packet.payload.hdr_pkg.exp[i] = mHdrInfo.exp[i];
+ ALOGV("%s Adding buffer M %d T %d Exp %d into hdr pkg ", __func__,
+ packet.payload.hdr_pkg.hdr_main_idx[i],
+ packet.payload.hdr_pkg.hdr_thm_idx[i],
+ packet.payload.hdr_pkg.exp[i]);
+ }
+ if (cam_ops_sendmsg(mCameraId, &packet, sizeof(packet), 0) <= 0) {
+ ALOGE("%s: sending start HDR msg failed", __func__);
+ rc= FAILED_TRANSACTION;
+ }
+ return rc;
+}
+
+void QCameraStream_Snapshot::InitHdrInfoForSnapshot(bool Hdr_on, int number_frames, int *exp )
+{
+ mHdrInfo.hdr_on = Hdr_on;
+ mHdrInfo.num_frame = number_frames;
+ mHdrInfo.num_raw_received = 0;
+
+ if(number_frames) {
+ memcpy(mHdrInfo.exp, exp, sizeof(int)*number_frames);
+ }
+ memset(&mHdrInfo.recvd_frame, 0,
+ sizeof(mm_camera_ch_data_buf_t *)*MAX_HDR_EXP_FRAME_NUM);
+}
+
+
+void QCameraStream_Snapshot::notifyHdrEvent(cam_ctrl_status_t status, void * cookie)
+{
+ camera_notify_callback notifyCb;
+ camera_data_callback dataCb, jpgDataCb;
+ int rc[3];
+ mm_camera_ch_data_buf_t *frame;
+ int i;
+
+ ALOGV("%s: HDR Done status (%d) received",__func__,status);
+ Mutex::Autolock lock(mStopCallbackLock);
+ /* Regular frame */
+ frame = mHdrInfo.recvd_frame[0];
+ rc[0] = encodeDisplayAndSave(frame, 0);
+ /* HDR frame */
+ frame = mHdrInfo.recvd_frame[2];
+ rc[2] = encodeDisplayAndSave(frame,0);
+
+ // send upperlayer callback for raw image (data or notify, not both)
+ if((mHalCamCtrl->mDataCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE)){
+ dataCb = mHalCamCtrl->mDataCb;
+ } else {
+ dataCb = NULL;
+ }
+ if((mHalCamCtrl->mNotifyCb) && (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY)){
+ notifyCb = mHalCamCtrl->mNotifyCb;
+ } else {
+ notifyCb = NULL;
+ }
+ if(mHalCamCtrl->mDataCb &&
+ (mHalCamCtrl->mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ /* get picture failed. Give jpeg callback with NULL data
+ * to the application to restore to preview mode
+ */
+ jpgDataCb = mHalCamCtrl->mDataCb;
+ } else {
+ jpgDataCb = NULL;
+ }
+
+ mStopCallbackLock.unlock();
+
+ for (i =0; i<= 2; i++) {
+ if(i==1)
+ continue;
+ if (rc[i] != NO_ERROR)
+ {
+ ALOGE("%s: Error while encoding/displaying/saving image", __func__);
+ if (frame) {
+ cam_evt_buf_done(mCameraId, mHdrInfo.recvd_frame[i]);
+ }
+
+ if (dataCb) {
+ dataCb(CAMERA_MSG_RAW_IMAGE, mHalCamCtrl->mSnapshotMemory.camera_memory[0],
+ 1, NULL, mHalCamCtrl->mCallbackCookie);
+ }
+ if (notifyCb) {
+ notifyCb(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0, mHalCamCtrl->mCallbackCookie);
+ }
+ if (jpgDataCb) {
+ jpgDataCb(CAMERA_MSG_COMPRESSED_IMAGE,
+ NULL, 0, NULL,
+ mHalCamCtrl->mCallbackCookie);
+ }
+
+ if ( mHdrInfo.recvd_frame[i] != NULL) {
+ free( mHdrInfo.recvd_frame[i]);
+ mHdrInfo.recvd_frame[i] = NULL;
+ }
+ }
+ }
+
+ if (mHdrInfo.recvd_frame[1]) {
+ cam_evt_buf_done(mCameraId, mHdrInfo.recvd_frame[1]);
+ free( mHdrInfo.recvd_frame[1]);
+ mHdrInfo.recvd_frame[1] = NULL;
+ }
+}
+
+}; // namespace android
+
diff --git a/camera/QCameraParameters.cpp b/camera/QCameraParameters.cpp
new file mode 100644
index 0000000..0c9fd3d
--- /dev/null
+++ b/camera/QCameraParameters.cpp
@@ -0,0 +1,400 @@
+/*
+**
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define LOG_TAG "QCameraParams"
+#include <utils/Log.h>
+#include <string.h>
+#include <stdlib.h>
+#include "QCameraParameters.h"
+
+namespace android {
+// Parameter keys to communicate between camera application and driver.
+const char QCameraParameters::KEY_SUPPORTED_HFR_SIZES[] = "hfr-size-values";
+const char QCameraParameters::KEY_PREVIEW_FRAME_RATE_MODE[] = "preview-frame-rate-mode";
+const char QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES[] = "preview-frame-rate-modes";
+const char QCameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE[] = "frame-rate-auto";
+const char QCameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE[] = "frame-rate-fixed";
+const char QCameraParameters::KEY_TOUCH_AF_AEC[] = "touch-af-aec";
+const char QCameraParameters::KEY_SUPPORTED_TOUCH_AF_AEC[] = "touch-af-aec-values";
+const char QCameraParameters::KEY_TOUCH_INDEX_AEC[] = "touch-index-aec";
+const char QCameraParameters::KEY_TOUCH_INDEX_AF[] = "touch-index-af";
+const char QCameraParameters::KEY_SCENE_DETECT[] = "scene-detect";
+const char QCameraParameters::KEY_SUPPORTED_SCENE_DETECT[] = "scene-detect-values";
+const char QCameraParameters::KEY_ISO_MODE[] = "iso";
+const char QCameraParameters::KEY_SUPPORTED_ISO_MODES[] = "iso-values";
+const char QCameraParameters::KEY_LENSSHADE[] = "lensshade";
+const char QCameraParameters::KEY_SUPPORTED_LENSSHADE_MODES[] = "lensshade-values";
+const char QCameraParameters::KEY_AUTO_EXPOSURE[] = "auto-exposure";
+const char QCameraParameters::KEY_SUPPORTED_AUTO_EXPOSURE[] = "auto-exposure-values";
+const char QCameraParameters::KEY_DENOISE[] = "denoise";
+const char QCameraParameters::KEY_SUPPORTED_DENOISE[] = "denoise-values";
+const char QCameraParameters::KEY_SELECTABLE_ZONE_AF[] = "selectable-zone-af";
+const char QCameraParameters::KEY_SUPPORTED_SELECTABLE_ZONE_AF[] = "selectable-zone-af-values";
+const char QCameraParameters::KEY_FACE_DETECTION[] = "face-detection";
+const char QCameraParameters::KEY_SUPPORTED_FACE_DETECTION[] = "face-detection-values";
+const char QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT[] = "mce";
+const char QCameraParameters::KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES[] = "mce-values";
+const char QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE[] = "video-hfr";
+const char QCameraParameters::KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[] = "video-hfr-values";
+const char QCameraParameters::KEY_REDEYE_REDUCTION[] = "redeye-reduction";
+const char QCameraParameters::KEY_SUPPORTED_REDEYE_REDUCTION[] = "redeye-reduction-values";
+const char QCameraParameters::KEY_HIGH_DYNAMIC_RANGE_IMAGING[] = "hdr";
+const char QCameraParameters::KEY_SUPPORTED_HDR_IMAGING_MODES[] = "hdr-values";
+const char QCameraParameters::KEY_POWER_MODE_SUPPORTED[] = "power-mode-supported";
+const char QCameraParameters::KEY_ZSL[] = "zsl";
+const char QCameraParameters::KEY_SUPPORTED_ZSL_MODES[] = "zsl-values";
+const char QCameraParameters::KEY_CAMERA_MODE[] = "camera-mode";
+const char QCameraParameters::KEY_AE_BRACKET_HDR[] = "ae-bracket-hdr";
+const char QCameraParameters::KEY_POWER_MODE[] = "power-mode";
+/*only effective when KEY_AE_BRACKET_HDR set to ae_bracketing*/
+//const char QCameraParameters::KEY_AE_BRACKET_SETTING_KEY[] = "ae-bracket-setting";
+
+// Values for effect settings.
+const char QCameraParameters::EFFECT_EMBOSS[] = "emboss";
+const char QCameraParameters::EFFECT_SKETCH[] = "sketch";
+const char QCameraParameters::EFFECT_NEON[] = "neon";
+
+// Values for auto exposure settings.
+const char QCameraParameters::TOUCH_AF_AEC_OFF[] = "touch-off";
+const char QCameraParameters::TOUCH_AF_AEC_ON[] = "touch-on";
+
+// Values for scene mode settings.
+const char QCameraParameters::SCENE_MODE_ASD[] = "asd"; // corresponds to CAMERA_BESTSHOT_AUTO in HAL
+const char QCameraParameters::SCENE_MODE_BACKLIGHT[] = "backlight";
+const char QCameraParameters::SCENE_MODE_FLOWERS[] = "flowers";
+const char QCameraParameters::SCENE_MODE_AR[] = "AR";
+const char QCameraParameters::SCENE_MODE_HDR[] = "hdr";
+
+// Values for auto scene detection settings.
+const char QCameraParameters::SCENE_DETECT_OFF[] = "off";
+const char QCameraParameters::SCENE_DETECT_ON[] = "on";
+
+// Formats for setPreviewFormat and setPictureFormat.
+const char QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO[] = "yuv420sp-adreno";
+const char QCameraParameters::PIXEL_FORMAT_RAW[] = "raw";
+const char QCameraParameters::PIXEL_FORMAT_YV12[] = "yuv420p";
+const char QCameraParameters::PIXEL_FORMAT_NV12[] = "nv12";
+
+// Values for focus mode settings.
+const char QCameraParameters::FOCUS_MODE_NORMAL[] = "normal";
+const char QCameraParameters::KEY_SKIN_TONE_ENHANCEMENT[] = "skinToneEnhancement";
+const char QCameraParameters::KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] = "skinToneEnhancement-values";
+
+// Values for ISO Settings
+const char QCameraParameters::ISO_AUTO[] = "auto";
+const char QCameraParameters::ISO_HJR[] = "ISO_HJR";
+const char QCameraParameters::ISO_100[] = "ISO100";
+const char QCameraParameters::ISO_200[] = "ISO200";
+const char QCameraParameters::ISO_400[] = "ISO400";
+const char QCameraParameters::ISO_800[] = "ISO800";
+const char QCameraParameters::ISO_1600[] = "ISO1600";
+
+ //Values for Lens Shading
+const char QCameraParameters::LENSSHADE_ENABLE[] = "enable";
+const char QCameraParameters::LENSSHADE_DISABLE[] = "disable";
+
+// Values for auto exposure settings.
+const char QCameraParameters::AUTO_EXPOSURE_FRAME_AVG[] = "frame-average";
+const char QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::AUTO_EXPOSURE_SPOT_METERING[] = "spot-metering";
+
+const char QCameraParameters::KEY_GPS_LATITUDE_REF[] = "gps-latitude-ref";
+const char QCameraParameters::KEY_GPS_LONGITUDE_REF[] = "gps-longitude-ref";
+const char QCameraParameters::KEY_GPS_ALTITUDE_REF[] = "gps-altitude-ref";
+const char QCameraParameters::KEY_GPS_STATUS[] = "gps-status";
+const char QCameraParameters::KEY_EXIF_DATETIME[] = "exif-datetime";
+
+const char QCameraParameters::KEY_HISTOGRAM[] = "histogram";
+const char QCameraParameters::KEY_SUPPORTED_HISTOGRAM_MODES[] = "histogram-values";
+
+//Values for Histogram Shading
+const char QCameraParameters::HISTOGRAM_ENABLE[] = "enable";
+const char QCameraParameters::HISTOGRAM_DISABLE[] = "disable";
+
+//Values for Skin Tone Enhancement Modes
+const char QCameraParameters::SKIN_TONE_ENHANCEMENT_ENABLE[] = "enable";
+const char QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE[] = "disable";
+
+const char QCameraParameters::KEY_SHARPNESS[] = "sharpness";
+const char QCameraParameters::KEY_MAX_SHARPNESS[] = "max-sharpness";
+const char QCameraParameters::KEY_CONTRAST[] = "contrast";
+const char QCameraParameters::KEY_MAX_CONTRAST[] = "max-contrast";
+const char QCameraParameters::KEY_SATURATION[] = "saturation";
+const char QCameraParameters::KEY_MAX_SATURATION[] = "max-saturation";
+
+const char QCameraParameters::KEY_SINGLE_ISP_OUTPUT_ENABLED[] = "single-isp-output-enabled";
+const char QCameraParameters::KEY_SUPPORTED_CAMERA_FEATURES[] = "qc-camera-features";
+const char QCameraParameters::KEY_MAX_NUM_REQUESTED_FACES[] = "qc-max-num-requested-faces";
+
+//Values for DENOISE
+const char QCameraParameters::DENOISE_OFF[] = "denoise-off";
+const char QCameraParameters::DENOISE_ON[] = "denoise-on";
+
+// Values for selectable zone af Settings
+const char QCameraParameters::SELECTABLE_ZONE_AF_AUTO[] = "auto";
+const char QCameraParameters::SELECTABLE_ZONE_AF_SPOT_METERING[] = "spot-metering";
+const char QCameraParameters::SELECTABLE_ZONE_AF_CENTER_WEIGHTED[] = "center-weighted";
+const char QCameraParameters::SELECTABLE_ZONE_AF_FRAME_AVERAGE[] = "frame-average";
+
+// Values for Face Detection settings.
+const char QCameraParameters::FACE_DETECTION_OFF[] = "off";
+const char QCameraParameters::FACE_DETECTION_ON[] = "on";
+
+// Values for MCE settings.
+const char QCameraParameters::MCE_ENABLE[] = "enable";
+const char QCameraParameters::MCE_DISABLE[] = "disable";
+
+// Values for HFR settings.
+const char QCameraParameters::VIDEO_HFR_OFF[] = "off";
+const char QCameraParameters::VIDEO_HFR_2X[] = "60";
+const char QCameraParameters::VIDEO_HFR_3X[] = "90";
+const char QCameraParameters::VIDEO_HFR_4X[] = "120";
+
+// Values for Redeye Reduction settings.
+const char QCameraParameters::REDEYE_REDUCTION_ENABLE[] = "enable";
+const char QCameraParameters::REDEYE_REDUCTION_DISABLE[] = "disable";
+
+// Values for HDR settings.
+const char QCameraParameters::HDR_ENABLE[] = "enable";
+const char QCameraParameters::HDR_DISABLE[] = "disable";
+
+// Values for ZSL settings.
+const char QCameraParameters::ZSL_OFF[] = "off";
+const char QCameraParameters::ZSL_ON[] = "on";
+
+// Values for HDR Bracketing settings.
+const char QCameraParameters::AE_BRACKET_HDR_OFF[] = "Off";
+const char QCameraParameters::AE_BRACKET_HDR[] = "HDR";
+const char QCameraParameters::AE_BRACKET[] = "AE-Bracket";
+
+const char QCameraParameters::LOW_POWER[] = "Low_Power";
+const char QCameraParameters::NORMAL_POWER[] = "Normal_Power";
+
+static const char* portrait = "portrait";
+static const char* landscape = "landscape";
+
+//QCameraParameters::QCameraParameters()
+// : mMap()
+//{
+//}
+
+QCameraParameters::~QCameraParameters()
+{
+}
+
+int QCameraParameters::getOrientation() const
+{
+ const char* orientation = get("orientation");
+ if (orientation && !strcmp(orientation, portrait))
+ return CAMERA_ORIENTATION_PORTRAIT;
+ return CAMERA_ORIENTATION_LANDSCAPE;
+}
+void QCameraParameters::setOrientation(int orientation)
+{
+ if (orientation == CAMERA_ORIENTATION_PORTRAIT) {
+ set("orientation", portrait);
+ } else {
+ set("orientation", landscape);
+ }
+}
+
+ //XXX ALOGE("Key \"%s\"contains invalid character (= or ;)", key);
+ //XXX ALOGE("Value \"%s\"contains invalid character (= or ;)", value);
+ //snprintf(str, sizeof(str), "%d", value);
+ //ALOGE("Cannot find delimeter (%c) in str=%s", delim, str);
+
+
+// Parse string like "(1, 2, 3, 4, ..., N)"
+// num is pointer to an allocated array of size N
+static int parseNDimVector(const char *str, int *num, int N, char delim = ',')
+{
+ char *start, *end;
+ if(num == NULL) {
+ ALOGE("Invalid output array (num == NULL)");
+ return -1;
+ }
+ //check if string starts and ends with parentheses
+ if(str[0] != '(' || str[strlen(str)-1] != ')') {
+ ALOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)", str);
+ return -1;
+ }
+ start = (char*) str;
+ start++;
+ for(int i=0; i<N; i++) {
+ *(num+i) = (int) strtol(start, &end, 10);
+ if(*end != delim && i < N-1) {
+ ALOGE("Cannot find delimeter '%c' in string \"%s\". end = %c", delim, str, *end);
+ return -1;
+ }
+ start = end+1;
+ }
+ return 0;
+}
+
+
+ //ALOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr);
+ //snprintf(str, sizeof(str), "%dx%d", width, height);
+
+
+
+// Parse string like "640x480" or "10000,20000"
+static int parse_pair(const char *str, int *first, int *second, char delim,
+ char **endptr = NULL)
+{
+ // Find the first integer.
+ char *end;
+ int w = (int)strtol(str, &end, 10);
+ // If a delimiter does not immediately follow, give up.
+ if (*end != delim) {
+ ALOGE("Cannot find delimeter (%c) in str=%s", delim, str);
+ return -1;
+ }
+
+ // Find the second integer, immediately after the delimiter.
+ int h = (int)strtol(end+1, &end, 10);
+
+ *first = w;
+ *second = h;
+
+ if (endptr) {
+ *endptr = end;
+ }
+
+ return 0;
+}
+
+static void parseSizesList(const char *sizesStr, Vector<Size> &sizes)
+{
+ if (sizesStr == 0) {
+ return;
+ }
+
+ char *sizeStartPtr = (char *)sizesStr;
+
+ while (true) {
+ int width, height;
+ int success = parse_pair(sizeStartPtr, &width, &height, 'x',
+ &sizeStartPtr);
+ if (success == -1 || (*sizeStartPtr != ',' && *sizeStartPtr != '\0')) {
+ ALOGE("Picture sizes string \"%s\" contains invalid character.", sizesStr);
+ return;
+ }
+ sizes.push(Size(width, height));
+
+ if (*sizeStartPtr == '\0') {
+ return;
+ }
+ sizeStartPtr++;
+ }
+}
+
+
+void QCameraParameters::getSupportedHfrSizes(Vector<Size> &sizes) const
+{
+ const char *hfrSizesStr = get(KEY_SUPPORTED_HFR_SIZES);
+ parseSizesList(hfrSizesStr, sizes);
+}
+
+void QCameraParameters::setPreviewFpsRange(int minFPS, int maxFPS)
+{
+ char str[32];
+ snprintf(str, sizeof(str), "%d,%d",minFPS,maxFPS);
+ set(KEY_PREVIEW_FPS_RANGE,str);
+}
+
+void QCameraParameters::setPreviewFrameRateMode(const char *mode)
+{
+ set(KEY_PREVIEW_FRAME_RATE_MODE, mode);
+}
+
+const char *QCameraParameters::getPreviewFrameRateMode() const
+{
+ return get(KEY_PREVIEW_FRAME_RATE_MODE);
+}
+
+
+ //ALOGD("dump: mMap.size = %d", mMap.size());
+ //ALOGD("%s: %s\n", k.string(), v.string());
+
+
+
+void QCameraParameters::setTouchIndexAec(int x, int y)
+{
+ char str[32];
+ snprintf(str, sizeof(str), "%dx%d", x, y);
+ set(KEY_TOUCH_INDEX_AEC, str);
+}
+
+void QCameraParameters::getTouchIndexAec(int *x, int *y) const
+{
+ *x = -1;
+ *y = -1;
+
+ // Get the current string, if it doesn't exist, leave the -1x-1
+ const char *p = get(KEY_TOUCH_INDEX_AEC);
+ if (p == 0)
+ return;
+
+ int tempX, tempY;
+ if (parse_pair(p, &tempX, &tempY, 'x') == 0) {
+ *x = tempX;
+ *y = tempY;
+ }
+}
+
+void QCameraParameters::setTouchIndexAf(int x, int y)
+{
+ char str[32];
+ snprintf(str, sizeof(str), "%dx%d", x, y);
+ set(KEY_TOUCH_INDEX_AF, str);
+}
+
+void QCameraParameters::getTouchIndexAf(int *x, int *y) const
+{
+ *x = -1;
+ *y = -1;
+
+ // Get the current string, if it doesn't exist, leave the -1x-1
+ const char *p = get(KEY_TOUCH_INDEX_AF);
+ if (p == 0)
+ return;
+
+ int tempX, tempY;
+ if (parse_pair(p, &tempX, &tempY, 'x') == 0) {
+ *x = tempX;
+ *y = tempY;
+ }
+}
+
+void QCameraParameters::getMeteringAreaCenter(int *x, int *y) const
+{
+ //Default invalid values
+ *x = -2000;
+ *y = -2000;
+
+ const char *p = get(KEY_METERING_AREAS);
+ if(p != NULL) {
+ int arr[5] = {-2000, -2000, -2000, -2000, 0};
+ parseNDimVector(p, arr, 5); //p = "(x1, y1, x2, y2, weight)"
+ *x = (arr[0] + arr[2])/2; //center_x = (x1+x2)/2
+ *y = (arr[1] + arr[3])/2; //center_y = (y1+y2)/2
+ }
+}
+
+
+}; // namespace android
+
diff --git a/camera/QCameraParameters.h b/camera/QCameraParameters.h
new file mode 100644
index 0000000..dd29dda
--- /dev/null
+++ b/camera/QCameraParameters.h
@@ -0,0 +1,257 @@
+/*
+**
+** Copyright 2008, The Android Open Source Project
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+#ifndef ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+#define ANDROID_HARDWARE_QCAMERA_PARAMETERS_H
+
+//#include <utils/KeyedVector.h>
+//#include <utils/String8.h>
+#include <camera/CameraParameters.h>
+
+namespace android {
+
+struct FPSRange{
+ int minFPS;
+ int maxFPS;
+ FPSRange(){
+ minFPS=0;
+ maxFPS=0;
+ };
+ FPSRange(int min,int max){
+ minFPS=min;
+ maxFPS=max;
+ };
+};
+class QCameraParameters: public CameraParameters
+{
+public:
+#if 1
+ QCameraParameters() : CameraParameters() {};
+ QCameraParameters(const String8 ¶ms): CameraParameters(params) {};
+ #else
+ QCameraParameters() : CameraParameters() {};
+ QCameraParameters(const String8 ¶ms) { unflatten(params); }
+#endif
+ ~QCameraParameters();
+
+ // Supported PREVIEW/RECORDING SIZES IN HIGH FRAME RATE recording, sizes in pixels.
+ // Example value: "800x480,432x320". Read only.
+ static const char KEY_SUPPORTED_HFR_SIZES[];
+ // The mode of preview frame rate.
+ // Example value: "frame-rate-auto, frame-rate-fixed".
+ static const char KEY_PREVIEW_FRAME_RATE_MODE[];
+ static const char KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES[];
+ static const char KEY_PREVIEW_FRAME_RATE_AUTO_MODE[];
+ static const char KEY_PREVIEW_FRAME_RATE_FIXED_MODE[];
+
+ static const char KEY_SKIN_TONE_ENHANCEMENT[] ;
+ static const char KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES[] ;
+
+ //Touch Af/AEC settings.
+ static const char KEY_TOUCH_AF_AEC[];
+ static const char KEY_SUPPORTED_TOUCH_AF_AEC[];
+ //Touch Index for AEC.
+ static const char KEY_TOUCH_INDEX_AEC[];
+ //Touch Index for AF.
+ static const char KEY_TOUCH_INDEX_AF[];
+ // Current auto scene detection mode.
+ // Example value: "off" or SCENE_DETECT_XXX constants. Read/write.
+ static const char KEY_SCENE_DETECT[];
+ // Supported auto scene detection settings.
+ // Example value: "off,backlight,snow/cloudy". Read only.
+ static const char KEY_SUPPORTED_SCENE_DETECT[];
+ // Returns true if video snapshot is supported. That is, applications
+ static const char KEY_FULL_VIDEO_SNAP_SUPPORTED[];
+ static const char KEY_POWER_MODE_SUPPORTED[];
+
+ static const char KEY_ISO_MODE[];
+ static const char KEY_SUPPORTED_ISO_MODES[];
+ static const char KEY_LENSSHADE[] ;
+ static const char KEY_SUPPORTED_LENSSHADE_MODES[] ;
+
+ static const char KEY_AUTO_EXPOSURE[];
+ static const char KEY_SUPPORTED_AUTO_EXPOSURE[];
+
+ static const char KEY_GPS_LATITUDE_REF[];
+ static const char KEY_GPS_LONGITUDE_REF[];
+ static const char KEY_GPS_ALTITUDE_REF[];
+ static const char KEY_GPS_STATUS[];
+ static const char KEY_EXIF_DATETIME[];
+ static const char KEY_MEMORY_COLOR_ENHANCEMENT[];
+ static const char KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES[];
+
+
+ static const char KEY_POWER_MODE[];
+
+ static const char KEY_ZSL[];
+ static const char KEY_SUPPORTED_ZSL_MODES[];
+
+ static const char KEY_CAMERA_MODE[];
+
+ static const char KEY_VIDEO_HIGH_FRAME_RATE[];
+ static const char KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES[];
+ static const char KEY_HIGH_DYNAMIC_RANGE_IMAGING[];
+ static const char KEY_SUPPORTED_HDR_IMAGING_MODES[];
+ static const char KEY_AE_BRACKET_HDR[];
+
+
+ // DENOISE
+ static const char KEY_DENOISE[];
+ static const char KEY_SUPPORTED_DENOISE[];
+
+ //Selectable zone AF.
+ static const char KEY_SELECTABLE_ZONE_AF[];
+ static const char KEY_SUPPORTED_SELECTABLE_ZONE_AF[];
+
+ //Face Detection
+ static const char KEY_FACE_DETECTION[];
+ static const char KEY_SUPPORTED_FACE_DETECTION[];
+
+ //Redeye Reduction
+ static const char KEY_REDEYE_REDUCTION[];
+ static const char KEY_SUPPORTED_REDEYE_REDUCTION[];
+ static const char EFFECT_EMBOSS[];
+ static const char EFFECT_SKETCH[];
+ static const char EFFECT_NEON[];
+
+ // Values for Touch AF/AEC
+ static const char TOUCH_AF_AEC_OFF[] ;
+ static const char TOUCH_AF_AEC_ON[] ;
+ static const char SCENE_MODE_ASD[];
+ static const char SCENE_MODE_BACKLIGHT[];
+ static const char SCENE_MODE_FLOWERS[];
+ static const char SCENE_MODE_AR[];
+ static const char SCENE_MODE_HDR[];
+ static const char SCENE_DETECT_OFF[];
+ static const char SCENE_DETECT_ON[];
+ static const char PIXEL_FORMAT_YUV420SP_ADRENO[]; // ADRENO
+ static const char PIXEL_FORMAT_RAW[];
+ static const char PIXEL_FORMAT_YV12[]; // NV12
+ static const char PIXEL_FORMAT_NV12[]; //NV12
+ // Normal focus mode. Applications should call
+ // CameraHardwareInterface.autoFocus to start the focus in this mode.
+ static const char FOCUS_MODE_NORMAL[];
+ static const char ISO_AUTO[];
+ static const char ISO_HJR[] ;
+ static const char ISO_100[];
+ static const char ISO_200[] ;
+ static const char ISO_400[];
+ static const char ISO_800[];
+ static const char ISO_1600[];
+ // Values for Lens Shading
+ static const char LENSSHADE_ENABLE[] ;
+ static const char LENSSHADE_DISABLE[] ;
+
+ // Values for auto exposure settings.
+ static const char AUTO_EXPOSURE_FRAME_AVG[];
+ static const char AUTO_EXPOSURE_CENTER_WEIGHTED[];
+ static const char AUTO_EXPOSURE_SPOT_METERING[];
+
+ static const char KEY_SHARPNESS[];
+ static const char KEY_MAX_SHARPNESS[];
+ static const char KEY_CONTRAST[];
+ static const char KEY_MAX_CONTRAST[];
+ static const char KEY_SATURATION[];
+ static const char KEY_MAX_SATURATION[];
+
+ static const char KEY_HISTOGRAM[] ;
+ static const char KEY_SUPPORTED_HISTOGRAM_MODES[] ;
+ // Values for HISTOGRAM
+ static const char HISTOGRAM_ENABLE[] ;
+ static const char HISTOGRAM_DISABLE[] ;
+
+ // Values for SKIN TONE ENHANCEMENT
+ static const char SKIN_TONE_ENHANCEMENT_ENABLE[] ;
+ static const char SKIN_TONE_ENHANCEMENT_DISABLE[] ;
+
+ // Values for Denoise
+ static const char DENOISE_OFF[] ;
+ static const char DENOISE_ON[] ;
+
+ // Values for auto exposure settings.
+ static const char SELECTABLE_ZONE_AF_AUTO[];
+ static const char SELECTABLE_ZONE_AF_SPOT_METERING[];
+ static const char SELECTABLE_ZONE_AF_CENTER_WEIGHTED[];
+ static const char SELECTABLE_ZONE_AF_FRAME_AVERAGE[];
+
+ // Values for Face Detection settings.
+ static const char FACE_DETECTION_OFF[];
+ static const char FACE_DETECTION_ON[];
+
+ // Values for MCE settings.
+ static const char MCE_ENABLE[];
+ static const char MCE_DISABLE[];
+
+ // Values for ZSL settings.
+ static const char ZSL_OFF[];
+ static const char ZSL_ON[];
+
+ // Values for HDR Bracketing settings.
+ static const char AE_BRACKET_HDR_OFF[];
+ static const char AE_BRACKET_HDR[];
+ static const char AE_BRACKET[];
+
+ // Values for Power mode settings.
+ static const char LOW_POWER[];
+ static const char NORMAL_POWER[];
+
+ // Values for HFR settings.
+ static const char VIDEO_HFR_OFF[];
+ static const char VIDEO_HFR_2X[];
+ static const char VIDEO_HFR_3X[];
+ static const char VIDEO_HFR_4X[];
+
+ // Values for Redeye Reduction settings.
+ static const char REDEYE_REDUCTION_ENABLE[];
+ static const char REDEYE_REDUCTION_DISABLE[];
+ // Values for HDR settings.
+ static const char HDR_ENABLE[];
+ static const char HDR_DISABLE[];
+
+ // Values for Redeye Reduction settings.
+ // static const char REDEYE_REDUCTION_ENABLE[];
+ // static const char REDEYE_REDUCTION_DISABLE[];
+ // Values for HDR settings.
+ // static const char HDR_ENABLE[];
+ // static const char HDR_DISABLE[];
+
+
+ static const char KEY_SINGLE_ISP_OUTPUT_ENABLED[];
+ static const char KEY_SUPPORTED_CAMERA_FEATURES[];
+ static const char KEY_MAX_NUM_REQUESTED_FACES[];
+
+ enum {
+ CAMERA_ORIENTATION_UNKNOWN = 0,
+ CAMERA_ORIENTATION_PORTRAIT = 1,
+ CAMERA_ORIENTATION_LANDSCAPE = 2,
+ };
+ int getOrientation() const;
+ void setOrientation(int orientation);
+ void getSupportedHfrSizes(Vector<Size> &sizes) const;
+ void setPreviewFpsRange(int minFPS,int maxFPS);
+ void setPreviewFrameRateMode(const char *mode);
+ const char *getPreviewFrameRateMode() const;
+ void setTouchIndexAec(int x, int y);
+ void getTouchIndexAec(int *x, int *y) const;
+ void setTouchIndexAf(int x, int y);
+ void getTouchIndexAf(int *x, int *y) const;
+ void getMeteringAreaCenter(int * x, int *y) const;
+
+};
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCameraStream.cpp b/camera/QCameraStream.cpp
new file mode 100644
index 0000000..a0ba84c
--- /dev/null
+++ b/camera/QCameraStream.cpp
@@ -0,0 +1,362 @@
+/*
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+
+#define LOG_TAG __FILE__
+#include <utils/Log.h>
+#include <utils/threads.h>
+
+
+#include "QCameraStream.h"
+
+/* QCameraStream class implementation goes here*/
+/* following code implement the control logic of this class*/
+
+namespace android {
+
+StreamQueue::StreamQueue(){
+ mInitialized = false;
+}
+
+StreamQueue::~StreamQueue(){
+ flush();
+}
+
+void StreamQueue::init(){
+ Mutex::Autolock l(&mQueueLock);
+ mInitialized = true;
+ mQueueWait.signal();
+}
+
+void StreamQueue::deinit(){
+ Mutex::Autolock l(&mQueueLock);
+ mInitialized = false;
+ mQueueWait.signal();
+}
+
+bool StreamQueue::isInitialized(){
+ Mutex::Autolock l(&mQueueLock);
+ return mInitialized;
+}
+
+bool StreamQueue::enqueue(
+ void * element){
+ Mutex::Autolock l(&mQueueLock);
+ if(mInitialized == false)
+ return false;
+
+ mContainer.add(element);
+ mQueueWait.signal();
+ return true;
+}
+
+bool StreamQueue::isEmpty(){
+ return (mInitialized && mContainer.isEmpty());
+}
+void* StreamQueue::dequeue(){
+
+ void *frame;
+ mQueueLock.lock();
+ while(mInitialized && mContainer.isEmpty()){
+ mQueueWait.wait(mQueueLock);
+ }
+
+ if(!mInitialized){
+ mQueueLock.unlock();
+ return NULL;
+ }
+
+ frame = mContainer.itemAt(0);
+ mContainer.removeAt(0);
+ mQueueLock.unlock();
+ return frame;
+}
+
+void StreamQueue::flush(){
+ Mutex::Autolock l(&mQueueLock);
+ mContainer.clear();
+}
+
+
+// ---------------------------------------------------------------------------
+// QCameraStream
+// ---------------------------------------------------------------------------
+
+/* initialize a streaming channel*/
+status_t QCameraStream::initChannel(int cameraId,
+ uint32_t ch_type_mask)
+{
+#if 0
+ int rc = MM_CAMERA_OK;
+ int i;
+ status_t ret = NO_ERROR;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ cam_ctrl_dimension_t dim;
+ mm_camera_ch_image_fmt_parm_t fmt;
+
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ rc = cam_config_get_parm(cameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != rc) {
+ ALOGE("%s: error - can't get camera dimension!", __func__);
+ ALOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }
+
+ if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask) {
+ rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_PREVIEW);
+ ALOGV("%s:ch_acquire MM_CAMERA_CH_PREVIEW, rc=%d\n",__func__, rc);
+
+ if(MM_CAMERA_OK != rc) {
+ ALOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+ ALOGE("%s: X", __func__);
+ return BAD_VALUE;
+ }
+ else{
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_PREVIEW;
+ fmt.def.fmt = CAMERA_YUV_420_NV12; //dim.prev_format;
+ fmt.def.dim.width = dim.display_width;
+ fmt.def.dim.height = dim.display_height;
+ ALOGV("%s: preview channel fmt = %d", __func__,
+ dim.prev_format);
+ ALOGV("%s: preview channel resolution = %d X %d", __func__,
+ dim.display_width, dim.display_height);
+
+ rc = cam_config_set_parm(cameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ ALOGV("%s: preview MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ ALOGE("%s:set preview channel format err=%d\n", __func__, ret);
+ ALOGE("%s: X", __func__);
+ ret = BAD_VALUE;
+ }
+ }
+ }
+
+
+ if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask)
+ {
+ rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_VIDEO);
+ ALOGV("%s:ch_acquire MM_CAMERA_CH_VIDEO, rc=%d\n",__func__, rc);
+
+ if(MM_CAMERA_OK != rc) {
+ ALOGE("%s: video channel acquir error =%d\n", __func__, rc);
+ ALOGE("%s: X", __func__);
+ ret = BAD_VALUE;
+ }
+ else {
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ fmt.ch_type = MM_CAMERA_CH_VIDEO;
+ fmt.video.video.fmt = CAMERA_YUV_420_NV12; //dim.enc_format;
+ fmt.video.video.dim.width = dim.video_width;
+ fmt.video.video.dim.height = dim.video_height;
+ ALOGV("%s: video channel fmt = %d", __func__,
+ dim.enc_format);
+ ALOGV("%s: video channel resolution = %d X %d", __func__,
+ dim.video_width, dim.video_height);
+
+ rc = cam_config_set_parm(cameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+
+ ALOGV("%s: video MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ ALOGE("%s:set video channel format err=%d\n", __func__, rc);
+ ALOGE("%s: X", __func__);
+ ret= BAD_VALUE;
+ }
+ }
+
+ } /*MM_CAMERA_CH_VIDEO*/
+#endif
+
+ int rc = MM_CAMERA_OK;
+ status_t ret = NO_ERROR;
+ mm_camera_op_mode_type_t op_mode=MM_CAMERA_OP_MODE_VIDEO;
+ int i;
+
+ ALOGV("QCameraStream::initChannel : E");
+ if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask){
+ rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_PREVIEW);
+ ALOGV("%s:ch_acquire MM_CAMERA_CH_PREVIEW, rc=%d\n",__func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ ALOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+ ALOGV("%s: X", __func__);
+ return BAD_VALUE;
+ }
+ /*Callback register*/
+ /* register a notify into the mmmm_camera_t object*/
+ /* ret = cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_PREVIEW,
+ preview_notify_cb,
+ this);
+ ALOGV("Buf notify MM_CAMERA_CH_PREVIEW, rc=%d\n",rc);*/
+ }else if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask){
+ rc = cam_ops_ch_acquire(cameraId, MM_CAMERA_CH_VIDEO);
+ ALOGV("%s:ch_acquire MM_CAMERA_CH_VIDEO, rc=%d\n",__func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ ALOGE("%s: preview channel acquir error =%d\n", __func__, rc);
+ ALOGV("%s: X", __func__);
+ return BAD_VALUE;
+ }
+ /*Callback register*/
+ /* register a notify into the mmmm_camera_t object*/
+ /*ret = cam_evt_register_buf_notify(mCameraId, MM_CAMERA_CH_VIDEO,
+ record_notify_cb,
+ this);
+ ALOGV("Buf notify MM_CAMERA_CH_VIDEO, rc=%d\n",rc);*/
+ }
+
+ ret = (MM_CAMERA_OK==rc)? NO_ERROR : BAD_VALUE;
+ ALOGV("%s: X, ret = %d", __func__, ret);
+ return ret;
+}
+
+status_t QCameraStream::deinitChannel(int cameraId,
+ mm_camera_channel_type_t ch_type)
+{
+
+ int rc = MM_CAMERA_OK;
+
+ ALOGV("%s: E, channel = %d\n", __func__, ch_type);
+
+ if (MM_CAMERA_CH_MAX <= ch_type) {
+ ALOGE("%s: X: BAD_VALUE", __func__);
+ return BAD_VALUE;
+ }
+
+ cam_ops_ch_release(cameraId, ch_type);
+
+ ALOGV("%s: X, channel = %d\n", __func__, ch_type);
+ return NO_ERROR;
+}
+
+status_t QCameraStream::setMode(int enable) {
+ ALOGV("%s :myMode %x ", __func__, myMode);
+ if (enable) {
+ myMode = (camera_mode_t)(myMode | CAMERA_ZSL_MODE);
+ } else {
+ myMode = (camera_mode_t)(myMode & ~CAMERA_ZSL_MODE);
+ }
+ return NO_ERROR;
+}
+
+status_t QCameraStream::setFormat(uint8_t ch_type_mask, cam_format_t previewFmt)
+{
+ int rc = MM_CAMERA_OK;
+ status_t ret = NO_ERROR;
+ int width = 0; /* width of channel */
+ int height = 0; /* height of channel */
+ cam_ctrl_dimension_t dim;
+ mm_camera_ch_image_fmt_parm_t fmt;
+ ALOGV("%s: E",__func__);
+
+ memset(&dim, 0, sizeof(cam_ctrl_dimension_t));
+ rc = cam_config_get_parm(mCameraId, MM_CAMERA_PARM_DIMENSION, &dim);
+ if (MM_CAMERA_OK != rc) {
+ ALOGE("%s: error - can't get camera dimension!", __func__);
+ ALOGV("%s: X", __func__);
+ return BAD_VALUE;
+ }
+ char mDeviceName[PROPERTY_VALUE_MAX];
+ property_get("ro.product.device",mDeviceName," ");
+ memset(&fmt, 0, sizeof(mm_camera_ch_image_fmt_parm_t));
+ if(MM_CAMERA_CH_PREVIEW_MASK & ch_type_mask){
+ fmt.ch_type = MM_CAMERA_CH_PREVIEW;
+ fmt.def.fmt = (cam_format_t)previewFmt;
+ fmt.def.dim.width = dim.display_width;
+ fmt.def.dim.height = dim.display_height;
+ }else if(MM_CAMERA_CH_VIDEO_MASK & ch_type_mask){
+ fmt.ch_type = MM_CAMERA_CH_VIDEO;
+ fmt.video.video.fmt = CAMERA_YUV_420_NV21; //dim.enc_format;
+ fmt.video.video.dim.width = dim.video_width;
+ fmt.video.video.dim.height = dim.video_height;
+ }/*else if(MM_CAMERA_CH_SNAPSHOT_MASK & ch_type_mask){
+ if(mHalCamCtrl->isRawSnapshot()) {
+ fmt.ch_type = MM_CAMERA_CH_RAW;
+ fmt.def.fmt = CAMERA_BAYER_SBGGR10;
+ fmt.def.dim.width = dim.raw_picture_width;
+ fmt.def.dim.height = dim.raw_picture_height;
+ }else{
+ //Jpeg???
+ fmt.ch_type = MM_CAMERA_CH_SNAPSHOT;
+ fmt.snapshot.main.fmt = dim.main_img_format;
+ fmt.snapshot.main.dim.width = dim.picture_width;
+ fmt.snapshot.main.dim.height = dim.picture_height;
+
+ fmt.snapshot.thumbnail.fmt = dim.thumb_format;
+ fmt.snapshot.thumbnail.dim.width = dim.ui_thumbnail_width;
+ fmt.snapshot.thumbnail.dim.height = dim.ui_thumbnail_height;
+ }
+ }*/
+
+ rc = cam_config_set_parm(mCameraId, MM_CAMERA_PARM_CH_IMAGE_FMT, &fmt);
+ ALOGV("%s: Stream MM_CAMERA_PARM_CH_IMAGE_FMT rc = %d\n", __func__, rc);
+ if(MM_CAMERA_OK != rc) {
+ ALOGE("%s:set stream channel format err=%d\n", __func__, ret);
+ ALOGV("%s: X", __func__);
+ ret = BAD_VALUE;
+ }
+ ALOGV("%s: X",__func__);
+ return ret;
+}
+
+QCameraStream::QCameraStream (){
+ mInit = false;
+ mActive = false;
+ /* memset*/
+ memset(&mCrop, 0, sizeof(mm_camera_ch_crop_t));
+}
+
+QCameraStream::QCameraStream (int cameraId, camera_mode_t mode)
+ :mCameraId(cameraId),
+ myMode(mode)
+{
+ mInit = false;
+ mActive = false;
+
+ /* memset*/
+ memset(&mCrop, 0, sizeof(mm_camera_ch_crop_t));
+}
+
+QCameraStream::~QCameraStream () {;}
+
+
+status_t QCameraStream::init() {
+ return NO_ERROR;
+}
+
+status_t QCameraStream::start() {
+ return NO_ERROR;
+}
+
+void QCameraStream::stop() {
+ return;
+}
+
+void QCameraStream::release() {
+ return;
+}
+
+void QCameraStream::setHALCameraControl(QCameraHardwareInterface* ctrl) {
+
+ /* provide a frame data user,
+ for the queue monitor thread to call the busy queue is not empty*/
+ mHalCamCtrl = ctrl;
+}
+
+}; // namespace android
diff --git a/camera/QCameraStream.h b/camera/QCameraStream.h
new file mode 100644
index 0000000..294ed58
--- /dev/null
+++ b/camera/QCameraStream.h
@@ -0,0 +1,391 @@
+/*
+** Copyright 2008, Google Inc.
+** Copyright (c) 2009-2012, The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QCAMERA_STREAM_H
+#define ANDROID_HARDWARE_QCAMERA_STREAM_H
+
+
+#include <utils/threads.h>
+
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <utils/threads.h>
+
+#include "QCameraHWI.h"
+#include "QCameraHWI_Mem.h"
+#include "QCamera_Intf.h"
+extern "C" {
+#include <mm_camera_interface2.h>
+
+#define DEFAULT_STREAM_WIDTH 320
+#define DEFAULT_STREAM_HEIGHT 240
+#define DEFAULT_LIVESHOT_WIDTH 2592
+#define DEFAULT_LIVESHOT_HEIGHT 1944
+
+#define MM_CAMERA_CH_PREVIEW_MASK (0x01 << MM_CAMERA_CH_PREVIEW)
+#define MM_CAMERA_CH_VIDEO_MASK (0x01 << MM_CAMERA_CH_VIDEO)
+#define MM_CAMERA_CH_SNAPSHOT_MASK (0x01 << MM_CAMERA_CH_SNAPSHOT)
+
+} /* extern C*/
+
+
+
+typedef struct snap_hdr_record_t_ {
+ bool hdr_on;
+ int num_frame;
+ int num_raw_received;
+
+ /*in terms of 2^(n/6), e.g -6 means (1/2)x, while 12 is 4x*/
+ int exp[MAX_HDR_EXP_FRAME_NUM];
+ mm_camera_ch_data_buf_t *recvd_frame[MAX_HDR_EXP_FRAME_NUM];
+} snap_hdr_record_t;
+
+
+namespace android {
+
+class QCameraHardwareInterface;
+
+class StreamQueue {
+private:
+ Mutex mQueueLock;
+ Condition mQueueWait;
+ bool mInitialized;
+
+ //Vector<struct msm_frame *> mContainer;
+ Vector<void *> mContainer;
+public:
+ StreamQueue();
+ virtual ~StreamQueue();
+ bool enqueue(void *element);
+ void flush();
+ void* dequeue();
+ void init();
+ void deinit();
+ bool isInitialized();
+bool isEmpty();
+};
+
+
+class QCameraStream { //: public virtual RefBase{
+
+public:
+ bool mInit;
+ bool mActive;
+
+ virtual status_t init();
+ virtual status_t start();
+ virtual void stop();
+ virtual void release();
+
+ status_t setFormat(uint8_t ch_type_mask, cam_format_t previewFmt);
+ status_t setMode(int enable);
+
+ virtual void setHALCameraControl(QCameraHardwareInterface* ctrl);
+
+ //static status_t openChannel(mm_camera_t *, mm_camera_channel_type_t ch_type);
+ virtual status_t initChannel(int cameraId, uint32_t ch_type_mask);
+ virtual status_t deinitChannel(int cameraId, mm_camera_channel_type_t ch_type);
+ virtual void releaseRecordingFrame(const void *opaque)
+ {
+ ;
+ }
+#if 0 // mzhu
+ virtual status_t getBufferInfo(sp<IMemory>& Frame, size_t *alignedSize)
+ {
+ return NO_ERROR;
+ }
+#endif // mzhu
+ virtual void prepareHardware()
+ {
+ ;
+ }
+ virtual sp<IMemoryHeap> getHeap() const{return NULL;}
+ virtual status_t initDisplayBuffers(){return NO_ERROR;}
+ virtual status_t initPreviewOnlyBuffers(){return NO_ERROR;}
+ virtual sp<IMemoryHeap> getRawHeap() const {return NULL;}
+ virtual void *getLastQueuedFrame(void){return NULL;}
+ virtual status_t takePictureZSL(void){return NO_ERROR;}
+ virtual status_t takeLiveSnapshot(){return NO_ERROR;}
+ virtual status_t takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame){return NO_ERROR;}
+ virtual void setModeLiveSnapshot(bool){;}
+ virtual status_t initSnapshotBuffers(cam_ctrl_dimension_t *dim,
+ int num_of_buf){return NO_ERROR;}
+
+ virtual void setFullSizeLiveshot(bool){};
+ /* Set the ANativeWindow */
+ virtual int setPreviewWindow(preview_stream_ops_t* window) {return NO_ERROR;}
+ virtual void notifyROIEvent(fd_roi_t roi) {;}
+ virtual void notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie) {};
+ virtual void resetSnapshotCounters(void ){};
+ virtual void InitHdrInfoForSnapshot(bool HDR_on, int number_frames, int *exp ) {};
+ virtual void notifyHdrEvent(cam_ctrl_status_t status, void * cookie) {};
+
+ QCameraStream();
+ QCameraStream(int, camera_mode_t);
+ virtual ~QCameraStream();
+ QCameraHardwareInterface* mHalCamCtrl;
+ mm_camera_ch_crop_t mCrop;
+
+ int mCameraId;
+ camera_mode_t myMode;
+
+ mutable Mutex mStopCallbackLock;
+private:
+ StreamQueue mBusyQueue;
+ StreamQueue mFreeQueue;
+public:
+ friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+};
+
+/*
+* Record Class
+*/
+class QCameraStream_record : public QCameraStream {
+public:
+ status_t init();
+ status_t start() ;
+ void stop() ;
+ void release() ;
+
+ static QCameraStream* createInstance(int cameraId, camera_mode_t);
+ static void deleteInstance(QCameraStream *p);
+
+ QCameraStream_record() {};
+ virtual ~QCameraStream_record();
+
+ status_t processRecordFrame(void *data);
+ status_t initEncodeBuffers();
+ status_t getBufferInfo(sp<IMemory>& Frame, size_t *alignedSize);
+ //sp<IMemoryHeap> getHeap() const;
+
+ void releaseRecordingFrame(const void *opaque);
+ void debugShowVideoFPS() const;
+
+ status_t takeLiveSnapshot();
+private:
+ QCameraStream_record(int, camera_mode_t);
+ void releaseEncodeBuffer();
+
+ cam_ctrl_dimension_t dim;
+ bool mDebugFps;
+
+ mm_camera_reg_buf_t mRecordBuf;
+ //int record_frame_len;
+ //static const int maxFrameCnt = 16;
+ //camera_memory_t *mCameraMemoryPtr[maxFrameCnt];
+ //int mNumRecordFrames;
+ //sp<PmemPool> mRecordHeap[maxFrameCnt];
+ struct msm_frame *recordframes;
+ //uint32_t record_offset[VIDEO_BUFFER_COUNT];
+ mm_camera_ch_data_buf_t mRecordedFrames[MM_CAMERA_MAX_NUM_FRAMES];
+ //Mutex mRecordFreeQueueLock;
+ //Vector<mm_camera_ch_data_buf_t> mRecordFreeQueue;
+
+ int mJpegMaxSize;
+ QCameraStream *mStreamSnap;
+
+};
+
+class QCameraStream_preview : public QCameraStream {
+public:
+ status_t init();
+ status_t start() ;
+ void stop() ;
+ void release() ;
+
+ static QCameraStream* createInstance(int, camera_mode_t);
+ static void deleteInstance(QCameraStream *p);
+
+ QCameraStream_preview() {};
+ virtual ~QCameraStream_preview();
+ void *getLastQueuedFrame(void);
+ /*init preview buffers with display case*/
+ status_t initDisplayBuffers();
+ /*init preview buffers without display case*/
+ status_t initPreviewOnlyBuffers();
+
+ status_t processPreviewFrame(mm_camera_ch_data_buf_t *frame);
+
+ /*init preview buffers with display case*/
+ status_t processPreviewFrameWithDisplay(mm_camera_ch_data_buf_t *frame);
+ /*init preview buffers without display case*/
+ status_t processPreviewFrameWithOutDisplay(mm_camera_ch_data_buf_t *frame);
+
+ int setPreviewWindow(preview_stream_ops_t* window);
+ void notifyROIEvent(fd_roi_t roi);
+ friend class QCameraHardwareInterface;
+
+private:
+ QCameraStream_preview(int cameraId, camera_mode_t);
+ /*allocate and free buffers with display case*/
+ status_t getBufferFromSurface();
+ status_t putBufferToSurface();
+
+ /*allocate and free buffers without display case*/
+ status_t getBufferNoDisplay();
+ status_t freeBufferNoDisplay();
+
+ void dumpFrameToFile(struct msm_frame* newFrame);
+ bool mFirstFrameRcvd;
+
+ int8_t my_id;
+ mm_camera_op_mode_type_t op_mode;
+ cam_ctrl_dimension_t dim;
+ struct msm_frame *mLastQueuedFrame;
+ mm_camera_reg_buf_t mDisplayBuf;
+ mm_cameara_stream_buf_t mDisplayStreamBuf;
+ Mutex mDisplayLock;
+ preview_stream_ops_t *mPreviewWindow;
+ static const int kPreviewBufferCount = PREVIEW_BUFFER_COUNT;
+ mm_camera_ch_data_buf_t mNotifyBuffer[16];
+ int8_t mNumFDRcvd;
+ int mVFEOutputs;
+ int mHFRFrameCnt;
+ int mHFRFrameSkip;
+};
+
+/* Snapshot Class - handle data flow*/
+class QCameraStream_Snapshot : public QCameraStream {
+public:
+ status_t init();
+ status_t start();
+ void stop();
+ void release();
+ void prepareHardware();
+ static QCameraStream* createInstance(int cameraId, camera_mode_t);
+ static void deleteInstance(QCameraStream *p);
+
+ status_t takePictureZSL(void);
+ status_t takePictureLiveshot(mm_camera_ch_data_buf_t* recvd_frame);
+ status_t receiveRawPicture(mm_camera_ch_data_buf_t* recvd_frame);
+ void receiveCompleteJpegPicture(jpeg_event_t event);
+ void jpegErrorHandler(jpeg_event_t event);
+ void receiveJpegFragment(uint8_t *ptr, uint32_t size);
+ void deInitBuffer(void);
+ sp<IMemoryHeap> getRawHeap() const;
+ int getSnapshotState();
+ /*Temp: to be removed once event handling is enabled in mm-camera*/
+ void runSnapshotThread(void *data);
+ bool isZSLMode();
+ void setFullSizeLiveshot(bool);
+ void notifyWDenoiseEvent(cam_ctrl_status_t status, void * cookie);
+ friend void liveshot_callback(mm_camera_ch_data_buf_t *frame,void *user_data);
+ void resetSnapshotCounters(void );
+ void InitHdrInfoForSnapshot(bool HDR_on, int number_frames, int *exp );
+ void notifyHdrEvent(cam_ctrl_status_t status, void * cookie);
+
+private:
+ QCameraStream_Snapshot(int, camera_mode_t);
+ virtual ~QCameraStream_Snapshot();
+
+ /* snapshot related private members */
+ status_t initJPEGSnapshot(int num_of_snapshots);
+ status_t initRawSnapshot(int num_of_snapshots);
+ status_t initZSLSnapshot(void);
+ status_t initFullLiveshot(void);
+ status_t cancelPicture();
+ void notifyShutter(common_crop_t *crop,
+ bool play_shutter_sound);
+ status_t initSnapshotBuffers(cam_ctrl_dimension_t *dim,
+ int num_of_buf);
+ status_t initRawSnapshotBuffers(cam_ctrl_dimension_t *dim,
+ int num_of_buf);
+ status_t deinitRawSnapshotBuffers(void);
+ status_t deinitSnapshotBuffers(void);
+ status_t initRawSnapshotChannel(cam_ctrl_dimension_t* dim,
+ int num_snapshots);
+ status_t initSnapshotFormat(cam_ctrl_dimension_t *dim);
+ status_t takePictureRaw(void);
+ status_t takePictureJPEG(void);
+ status_t startStreamZSL(void);
+ void deinitSnapshotChannel(mm_camera_channel_type_t);
+ status_t configSnapshotDimension(cam_ctrl_dimension_t* dim);
+ status_t encodeData(mm_camera_ch_data_buf_t* recvd_frame,
+ common_crop_t *crop_info,
+ int frame_len,
+ bool enqueued);
+ status_t encodeDisplayAndSave(mm_camera_ch_data_buf_t* recvd_frame,
+ bool enqueued);
+ status_t setZSLChannelAttribute(void);
+ void handleError();
+ void setSnapshotState(int state);
+ void setModeLiveSnapshot(bool);
+ bool isLiveSnapshot(void);
+ void stopPolling(void);
+ bool isFullSizeLiveshot(void);
+ status_t doWaveletDenoise(mm_camera_ch_data_buf_t* frame);
+ status_t sendWDenoiseStartMsg(mm_camera_ch_data_buf_t * frame);
+ void lauchNextWDenoiseFromQueue();
+ status_t doHdrProcessing( );
+
+ /* Member variables */
+
+ int mSnapshotFormat;
+ int mPictureWidth;
+ int mPictureHeight;
+ cam_format_t mPictureFormat;
+ int mPostviewWidth;
+ int mPostviewHeight;
+ int mThumbnailWidth;
+ int mThumbnailHeight;
+ cam_format_t mThumbnailFormat;
+ int mJpegOffset;
+ int mSnapshotState;
+ int mNumOfSnapshot;
+ int mNumOfRecievedJPEG;
+ bool mModeLiveSnapshot;
+ bool mBurstModeFlag;
+ int mActualPictureWidth;
+ int mActualPictureHeight;
+ bool mJpegDownscaling;
+ sp<AshmemPool> mJpegHeap;
+ /*TBD:Bikas: This is defined in HWI too.*/
+#ifdef USE_ION
+ sp<IonPool> mDisplayHeap;
+ sp<IonPool> mPostviewHeap;
+#else
+ sp<PmemPool> mDisplayHeap;
+ sp<PmemPool> mPostviewHeap;
+#endif
+ mm_camera_ch_data_buf_t *mCurrentFrameEncoded;
+ mm_cameara_stream_buf_t mSnapshotStreamBuf;
+ mm_cameara_stream_buf_t mPostviewStreamBuf;
+ StreamQueue mSnapshotQueue;
+ static const int mMaxSnapshotBufferCount = 16;
+ int mSnapshotBufferNum;
+ int mMainfd[mMaxSnapshotBufferCount];
+ int mThumbfd[mMaxSnapshotBufferCount];
+ int mMainSize;
+ int mThumbSize;
+ camera_memory_t *mCameraMemoryPtrMain[mMaxSnapshotBufferCount];
+ camera_memory_t *mCameraMemoryPtrThumb[mMaxSnapshotBufferCount];
+ int mJpegSessionId;
+ int dump_fd;
+ bool mFullLiveshot;
+ StreamQueue mWDNQueue; // queue to hold frames while one frame is sent out for WDN
+ bool mIsDoingWDN; // flag to indicate if WDN is going on (one frame is sent out for WDN)
+ bool mDropThumbnail;
+ int mJpegQuality;
+ snap_hdr_record_t mHdrInfo;
+ int hdrRawCount;
+ int hdrJpegCount;
+}; // QCameraStream_Snapshot
+
+
+}; // namespace android
+
+#endif
diff --git a/camera/QCamera_Intf.h b/camera/QCamera_Intf.h
new file mode 100644
index 0000000..4daea63
--- /dev/null
+++ b/camera/QCamera_Intf.h
@@ -0,0 +1,2769 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef __QCAMERA_INTF_H__
+#define __QCAMERA_INTF_H__
+
+#include <stdint.h>
+#include <pthread.h>
+#include <inttypes.h>
+#include <media/msm_camera.h>
+
+#define PAD_TO_WORD(a) (((a)+3)&~3)
+#define PAD_TO_2K(a) (((a)+2047)&~2047)
+#define PAD_TO_4K(a) (((a)+4095)&~4095)
+#define PAD_TO_8K(a) (((a)+8191)&~8191)
+
+#define CEILING32(X) (((X) + 0x0001F) & 0xFFFFFFE0)
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define CEILING4(X) (((X) + 0x0003) & 0xFFFC)
+#define CEILING2(X) (((X) + 0x0001) & 0xFFFE)
+
+#define MAX_ROI 2
+#define MAX_NUM_PARM 5
+#define MAX_NUM_OPS 2
+#define VIDEO_MAX_PLANES 8
+#define MAX_SNAPSHOT_BUFFERS 5
+#define MAX_EXP_BRACKETING_LENGTH 32
+
+/* No sharpness for default */
+#define CAMERA_MIN_SHARPNESS 0
+#define CAMERA_DEF_SHARPNESS 10
+#define CAMERA_MAX_SHARPNESS 30
+#define CAMERA_SHARPNESS_STEP 5
+
+/* No saturation for default */
+#define CAMERA_MIN_SATURATION 0
+#define CAMERA_DEF_SATURATION 5
+#define CAMERA_MAX_SATURATION 10
+#define CAMERA_SATURATION_STEP 1
+
+#define CAMERA_MIN_BRIGHTNESS 0
+#define CAMERA_DEF_BRIGHTNESS 3
+#define CAMERA_MAX_BRIGHTNESS 6
+#define CAMERA_BRIGHTNESS_STEP 1
+
+#define CAMERA_MIN_CONTRAST 0
+#define CAMERA_DEF_CONTRAST 5
+#define CAMERA_MAX_CONTRAST 10
+#define CAMERA_CONTRAST_STEP 1
+
+/* No hue for default. */
+#define CAMERA_MIN_HUE 0
+#define CAMERA_DEF_HUE 0
+#define CAMERA_MAX_HUE 300
+#define CAMERA_HUE_STEP 60
+
+#define CAMERA_MIN_ZOOM 0
+#define CAMERA_DEF_ZOOM 0
+#define CAMERA_MAX_ZOOM 0x31
+#define CAMERA_ZOOM_STEP 0x3
+
+#define ANDROID_FB0 "/dev/graphics/fb0"
+#define LE_FB0 "/dev/fb0"
+
+// Events common to encoder and decoder
+#define JPEG_EVENT_DONE 0
+#define JPEG_EVENT_WARNING 1
+#define JPEG_EVENT_ERROR 2
+#define JPEG_EVENT_ABORTED 3
+// Events specific to encoder
+#define JPEG_EVENT_THUMBNAIL_DROPPED 4
+
+/* Exif Tag ID */
+typedef uint32_t exif_tag_id_t;
+
+/* Exif Info (opaque definition) */
+struct exif_info_t;
+typedef struct exif_info_t * exif_info_obj_t;
+
+typedef enum {
+ BACK_CAMERA,
+ FRONT_CAMERA,
+}cam_position_t;
+
+typedef enum {
+ CAM_CTRL_FAILED, /* Failure in doing operation */
+ CAM_CTRL_SUCCESS, /* Operation Succeded */
+ CAM_CTRL_INVALID_PARM, /* Inavlid parameter provided */
+ CAM_CTRL_NOT_SUPPORTED, /* Parameter/operation not supported */
+ CAM_CTRL_ACCEPTED, /* Parameter accepted */
+ CAM_CTRL_MAX,
+} cam_ctrl_status_t;
+
+typedef enum {
+ CAMERA_YUV_420_NV12,
+ CAMERA_YUV_420_NV21,
+ CAMERA_YUV_420_NV21_ADRENO,
+ CAMERA_BAYER_SBGGR10,
+ CAMERA_RDI,
+ CAMERA_YUV_420_YV12,
+ CAMERA_YUV_422_NV16,
+ CAMERA_YUV_422_NV61,
+ CAMERA_YUV_422_YUYV,
+} cam_format_t;
+
+typedef enum {
+ CAMERA_PAD_NONE,
+ CAMERA_PAD_TO_WORD, /*2 bytes*/
+ CAMERA_PAD_TO_LONG_WORD, /*4 bytes*/
+ CAMERA_PAD_TO_8, /*8 bytes*/
+ CAMERA_PAD_TO_16, /*16 bytes*/
+
+ CAMERA_PAD_TO_1K, /*1k bytes*/
+ CAMERA_PAD_TO_2K, /*2k bytes*/
+ CAMERA_PAD_TO_4K,
+ CAMERA_PAD_TO_8K
+} cam_pad_format_t;
+
+typedef struct {
+ int ext_mode; /* preview, main, thumbnail, video, raw, etc */
+ int frame_idx; /* frame index */
+ int fd; /* origin fd */
+ uint32_t size;
+} mm_camera_frame_map_type;
+
+typedef struct {
+ int ext_mode; /* preview, main, thumbnail, video, raw, etc */
+ int frame_idx; /* frame index */
+} mm_camera_frame_unmap_type;
+
+typedef enum {
+ CAM_SOCK_MSG_TYPE_FD_MAPPING,
+ CAM_SOCK_MSG_TYPE_FD_UNMAPPING,
+ CAM_SOCK_MSG_TYPE_WDN_START,
+ CAM_SOCK_MSG_TYPE_HIST_MAPPING,
+ CAM_SOCK_MSG_TYPE_HIST_UNMAPPING,
+ CAM_SOCK_MSG_TYPE_HDR_START,
+ CAM_SOCK_MSG_TYPE_MAX
+}mm_camera_socket_msg_type;
+
+
+#define MAX_HDR_EXP_FRAME_NUM 5
+typedef struct {
+ unsigned long cookie;
+ int num_hdr_frames;
+ int hdr_main_idx[MAX_HDR_EXP_FRAME_NUM];
+ int hdr_thm_idx[MAX_HDR_EXP_FRAME_NUM];
+ int exp[MAX_HDR_EXP_FRAME_NUM];
+} mm_camera_hdr_start_type;
+
+#define MM_MAX_WDN_NUM 2
+typedef struct {
+ unsigned long cookie;
+ int num_frames;
+ int ext_mode[MM_MAX_WDN_NUM];
+ int frame_idx[MM_MAX_WDN_NUM];
+} mm_camera_wdn_start_type;
+
+typedef struct {
+ mm_camera_socket_msg_type msg_type;
+ union {
+ mm_camera_frame_map_type frame_fd_map;
+ mm_camera_frame_unmap_type frame_fd_unmap;
+ mm_camera_wdn_start_type wdn_start;
+ mm_camera_hdr_start_type hdr_pkg;
+ } payload;
+} cam_sock_packet_t;
+
+
+typedef enum {
+ CAM_VIDEO_FRAME,
+ CAM_SNAPSHOT_FRAME,
+ CAM_PREVIEW_FRAME,
+}cam_frame_type_t;
+
+
+typedef enum {
+ CAMERA_MODE_2D = (1<<0),
+ CAMERA_MODE_3D = (1<<1),
+ CAMERA_NONZSL_MODE = (1<<2),
+ CAMERA_ZSL_MODE = (1<<3),
+ CAMERA_MODE_MAX = CAMERA_ZSL_MODE,
+} camera_mode_t;
+
+
+typedef struct {
+ int modes_supported;
+ int8_t camera_id;
+ cam_position_t position;
+ uint32_t sensor_mount_angle;
+}qcamera_info_t;
+
+typedef struct {
+ camera_mode_t mode;
+ int8_t camera_id;
+ camera_mode_t cammode;
+}config_params_t;
+
+typedef struct {
+ uint32_t len;
+ uint32_t y_offset;
+ uint32_t cbcr_offset;
+} cam_sp_len_offset_t;
+
+typedef struct{
+ uint32_t len;
+ uint32_t offset;
+} cam_mp_len_offset_t;
+
+typedef struct {
+ int num_planes;
+ union {
+ cam_sp_len_offset_t sp;
+ cam_mp_len_offset_t mp[8];
+ };
+ uint32_t frame_len;
+} cam_frame_len_offset_t;
+
+typedef struct {
+ uint32_t parm[MAX_NUM_PARM];
+ uint32_t ops[MAX_NUM_OPS];
+ uint8_t yuv_output;
+ uint8_t jpeg_capture;
+ uint32_t max_pict_width;
+ uint32_t max_pict_height;
+ uint32_t max_preview_width;
+ uint32_t max_preview_height;
+ uint32_t max_video_width;
+ uint32_t max_video_height;
+ uint32_t effect;
+ camera_mode_t modes;
+ uint8_t preview_format;
+ uint32_t preview_sizes_cnt;
+ uint32_t thumb_sizes_cnt;
+ uint32_t video_sizes_cnt;
+ uint32_t hfr_sizes_cnt;
+ uint8_t vfe_output_enable;
+ uint8_t hfr_frame_skip;
+ uint32_t default_preview_width;
+ uint32_t default_preview_height;
+ uint32_t bestshot_reconfigure;
+ uint32_t pxlcode;
+}cam_prop_t;
+
+typedef struct {
+ uint16_t video_width; /* Video width seen by VFE could be different than orig. Ex. DIS */
+ uint16_t video_height; /* Video height seen by VFE */
+ uint16_t picture_width; /* Picture width seen by VFE */
+ uint16_t picture_height; /* Picture height seen by VFE */
+ uint16_t display_width; /* width of display */
+ uint16_t display_height; /* height of display */
+ uint16_t orig_video_width; /* original video width received */
+ uint16_t orig_video_height; /* original video height received */
+ uint16_t orig_picture_dx; /* original picture width received */
+ uint16_t orig_picture_dy; /* original picture height received */
+ uint16_t ui_thumbnail_height; /* Just like orig_picture_dx */
+ uint16_t ui_thumbnail_width; /* Just like orig_picture_dy */
+ uint16_t thumbnail_height;
+ uint16_t thumbnail_width;
+ uint16_t orig_picture_width;
+ uint16_t orig_picture_height;
+ uint16_t orig_thumb_width;
+ uint16_t orig_thumb_height;
+ uint16_t raw_picture_height;
+ uint16_t raw_picture_width;
+ uint16_t rdi0_height;
+ uint16_t rdi0_width;
+ uint16_t rdi1_height;
+ uint16_t rdi1_width;
+ uint32_t hjr_xtra_buff_for_bayer_filtering;
+ cam_format_t prev_format;
+ cam_format_t enc_format;
+ cam_format_t thumb_format;
+ cam_format_t main_img_format;
+ cam_format_t rdi0_format;
+ cam_format_t rdi1_format;
+ cam_format_t raw_img_format;
+ cam_pad_format_t prev_padding_format;
+ cam_pad_format_t enc_padding_format;
+ cam_pad_format_t thumb_padding_format;
+ cam_pad_format_t main_padding_format;
+ uint16_t display_luma_width;
+ uint16_t display_luma_height;
+ uint16_t display_chroma_width;
+ uint16_t display_chroma_height;
+ uint16_t video_luma_width;
+ uint16_t video_luma_height;
+ uint16_t video_chroma_width;
+ uint16_t video_chroma_height;
+ uint16_t thumbnail_luma_width;
+ uint16_t thumbnail_luma_height;
+ uint16_t thumbnail_chroma_width;
+ uint16_t thumbnail_chroma_height;
+ uint16_t main_img_luma_width;
+ uint16_t main_img_luma_height;
+ uint16_t main_img_chroma_width;
+ uint16_t main_img_chroma_height;
+ int rotation;
+ cam_frame_len_offset_t display_frame_offset;
+ cam_frame_len_offset_t video_frame_offset;
+ cam_frame_len_offset_t picture_frame_offset;
+ cam_frame_len_offset_t thumb_frame_offset;
+ uint32_t channel_interface_mask;
+} cam_ctrl_dimension_t;
+
+/* One CSI channel-ID (cid) / data-type (dt) pair bound to a stream
+ * instance handle. NOTE(review): field semantics inferred from names and
+ * from cam_cid_info_t below -- confirm against the mm-camera interface. */
+typedef struct {
+ uint8_t cid;
+ uint8_t dt;
+ uint32_t inst_handle;
+} cam_cid_entry_t;
+
+/* Fixed upper bound on CID entries carried in one cam_cid_info_t. */
+#define CAM_MAX_CID_NUM 8
+typedef struct {
+ /*should we hard code max CIDs? if not we need to have two CMD*/
+ uint8_t num_cids;
+ cam_cid_entry_t cid_entries[CAM_MAX_CID_NUM];
+} cam_cid_info_t;
+
+/* Query/negotiate the resolution and frame layout for one image mode.
+ * Caller fills the "input" fields; width/height may be adjusted and
+ * frame_offset is filled in by the responder. */
+typedef struct {
+ /* we still use prev, video, main,
+ * thumb to interpret image types */
+ uint32_t image_mode; /* input */
+ cam_format_t format; /* input */
+ cam_pad_format_t padding_format; /* input */
+ int rotation; /* input */
+ uint16_t width; /* input/output */
+ uint16_t height; /* input/output */
+ cam_frame_len_offset_t frame_offset; /* output */
+} cam_frame_resolution_t;
+
+/* Add enumerations at the bottom but before MM_CAMERA_PARM_MAX */
+/* Parameter IDs for the mm-camera get/set-parameter interface. Values are
+ * positional (no explicit assignments), so existing entries must never be
+ * reordered or removed; append new IDs just above MM_CAMERA_PARM_MAX. */
+typedef enum {
+ MM_CAMERA_PARM_PICT_SIZE,
+ MM_CAMERA_PARM_ZOOM_RATIO,
+ MM_CAMERA_PARM_HISTOGRAM,
+ MM_CAMERA_PARM_DIMENSION,
+ MM_CAMERA_PARM_FPS,
+ MM_CAMERA_PARM_FPS_MODE, /*5*/
+ MM_CAMERA_PARM_EFFECT,
+ MM_CAMERA_PARM_EXPOSURE_COMPENSATION,
+ MM_CAMERA_PARM_EXPOSURE,
+ MM_CAMERA_PARM_SHARPNESS,
+ MM_CAMERA_PARM_CONTRAST, /*10*/
+ MM_CAMERA_PARM_SATURATION,
+ MM_CAMERA_PARM_BRIGHTNESS,
+ MM_CAMERA_PARM_WHITE_BALANCE,
+ MM_CAMERA_PARM_LED_MODE,
+ MM_CAMERA_PARM_ANTIBANDING, /*15*/
+ MM_CAMERA_PARM_ROLLOFF,
+ MM_CAMERA_PARM_CONTINUOUS_AF,
+ MM_CAMERA_PARM_FOCUS_RECT,
+ MM_CAMERA_PARM_AEC_ROI,
+ MM_CAMERA_PARM_AF_ROI, /*20*/
+ MM_CAMERA_PARM_HJR,
+ MM_CAMERA_PARM_ISO,
+ MM_CAMERA_PARM_BL_DETECTION,
+ MM_CAMERA_PARM_SNOW_DETECTION,
+ MM_CAMERA_PARM_BESTSHOT_MODE, /*25*/
+ MM_CAMERA_PARM_ZOOM,
+ MM_CAMERA_PARM_VIDEO_DIS,
+ MM_CAMERA_PARM_VIDEO_ROT,
+ MM_CAMERA_PARM_SCE_FACTOR,
+ MM_CAMERA_PARM_FD, /*30*/
+ MM_CAMERA_PARM_MODE,
+ /* 2nd 32 bits */
+ MM_CAMERA_PARM_3D_FRAME_FORMAT,
+ MM_CAMERA_PARM_CAMERA_ID,
+ MM_CAMERA_PARM_CAMERA_INFO,
+ MM_CAMERA_PARM_PREVIEW_SIZE, /*35*/
+ MM_CAMERA_PARM_QUERY_FALSH4SNAP, /* (sic: "FALSH" -- identifier kept for compatibility) */
+ MM_CAMERA_PARM_FOCUS_DISTANCES,
+ MM_CAMERA_PARM_BUFFER_INFO,
+ MM_CAMERA_PARM_JPEG_ROTATION,
+ MM_CAMERA_PARM_JPEG_MAINIMG_QUALITY, /* 40 */
+ MM_CAMERA_PARM_JPEG_THUMB_QUALITY,
+ MM_CAMERA_PARM_ZSL_ENABLE,
+ MM_CAMERA_PARM_FOCAL_LENGTH,
+ MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+ MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE, /* 45 */
+ MM_CAMERA_PARM_MCE,
+ MM_CAMERA_PARM_RESET_LENS_TO_INFINITY,
+ MM_CAMERA_PARM_SNAPSHOTDATA,
+ MM_CAMERA_PARM_HFR,
+ MM_CAMERA_PARM_REDEYE_REDUCTION, /* 50 */
+ MM_CAMERA_PARM_WAVELET_DENOISE,
+ MM_CAMERA_PARM_3D_DISPLAY_DISTANCE,
+ MM_CAMERA_PARM_3D_VIEW_ANGLE,
+ MM_CAMERA_PARM_PREVIEW_FORMAT,
+ MM_CAMERA_PARM_RDI_FORMAT,
+ MM_CAMERA_PARM_HFR_SIZE, /* 55 */
+ MM_CAMERA_PARM_3D_EFFECT,
+ MM_CAMERA_PARM_3D_MANUAL_CONV_RANGE,
+ MM_CAMERA_PARM_3D_MANUAL_CONV_VALUE,
+ MM_CAMERA_PARM_ENABLE_3D_MANUAL_CONVERGENCE,
+ /* These are new parameters defined here */
+ MM_CAMERA_PARM_CH_IMAGE_FMT, /* 60 */ // mm_camera_ch_image_fmt_parm_t
+ MM_CAMERA_PARM_OP_MODE, // camera state, sub state also
+ MM_CAMERA_PARM_SHARPNESS_CAP, //
+ MM_CAMERA_PARM_SNAPSHOT_BURST_NUM, // num shots per snapshot action
+ MM_CAMERA_PARM_LIVESHOT_MAIN, // enable/disable full size live shot
+ MM_CAMERA_PARM_MAXZOOM, /* 65 */
+ MM_CAMERA_PARM_LUMA_ADAPTATION, // enable/disable
+ MM_CAMERA_PARM_HDR,
+ MM_CAMERA_PARM_CROP,
+ MM_CAMERA_PARM_MAX_PICTURE_SIZE,
+ MM_CAMERA_PARM_MAX_PREVIEW_SIZE, /* 70 */
+ MM_CAMERA_PARM_ASD_ENABLE,
+ MM_CAMERA_PARM_RECORDING_HINT,
+ MM_CAMERA_PARM_CAF_ENABLE,
+ MM_CAMERA_PARM_FULL_LIVESHOT,
+ MM_CAMERA_PARM_DIS_ENABLE, /* 75 */
+ MM_CAMERA_PARM_AEC_LOCK,
+ MM_CAMERA_PARM_AWB_LOCK,
+ MM_CAMERA_PARM_AF_MTR_AREA,
+ MM_CAMERA_PARM_AEC_MTR_AREA,
+ MM_CAMERA_GET_PARM_LOW_LIGHT_FOR_ZSL,
+ MM_CAMERA_PARM_LOW_POWER_MODE,
+ MM_CAMERA_PARM_MAX_HFR_MODE, /* 80 */
+ MM_CAMERA_PARM_MAX_VIDEO_SIZE,
+ MM_CAMERA_PARM_DEF_PREVIEW_SIZES,
+ MM_CAMERA_PARM_DEF_VIDEO_SIZES,
+ MM_CAMERA_PARM_DEF_THUMB_SIZES,
+ MM_CAMERA_PARM_DEF_HFR_SIZES,
+ MM_CAMERA_PARM_PREVIEW_SIZES_CNT,
+ MM_CAMERA_PARM_VIDEO_SIZES_CNT,
+ MM_CAMERA_PARM_THUMB_SIZES_CNT,
+ MM_CAMERA_PARM_HFR_SIZES_CNT,
+ MM_CAMERA_PARM_GRALLOC_USAGE,
+ MM_CAMERA_PARM_VFE_OUTPUT_ENABLE, //to check whether both outputs are
+ MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH,
+ MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT,
+ MM_CAMERA_PARM_FOCUS_MODE,
+ MM_CAMERA_PARM_HFR_FRAME_SKIP,
+ MM_CAMERA_PARM_CH_INTERFACE,
+ //or single output enabled to differentiate 7x27a with others
+ MM_CAMERA_PARM_BESTSHOT_RECONFIGURE,
+ MM_CAMERA_PARM_MAX_NUM_FACES_DECT,
+ MM_CAMERA_PARM_FPS_RANGE,
+ MM_CAMERA_PARM_CID,
+ MM_CAMERA_PARM_FRAME_RESOLUTION,
+ MM_CAMERA_PARM_RAW_SNAPSHOT_FMT,
+ MM_CAMERA_PARM_FACIAL_FEATURE_INFO,
+ MM_CAMERA_PARM_CAF_LOCK_CANCEL,
+ MM_CAMERA_PARM_CAF_TYPE,
+ MM_CAMERA_PARM_LUX_IDX,
+ MM_CAMERA_PARM_GET_AF_STATUS,
+ MM_CAMERA_PARM_CHECK_AF_RETRY,
+ MM_CAMERA_PARM_LG_CAF_LOCK,
+ MM_CAMERA_PARM_INFORM_STARTPRVIEW, /* (sic: "STARTPRVIEW" -- identifier kept for compatibility) */
+ MM_CAMERA_PARM_MAX
+} mm_camera_parm_type_t;
+
+/* Which of the two possible streams on a channel carry image vs. raw data. */
+typedef enum {
+ STREAM_IMAGE,
+ STREAM_RAW,
+ STREAM_IMAGE_AND_RAW,
+ STREAM_RAW_AND_RAW,
+ STREAM_MAX,
+} mm_camera_channel_stream_info_t;
+
+/* Control-command IDs sent to the camera daemon. Values are positional and
+ * form a wire protocol: never reorder or delete entries; append new commands
+ * just above CAMERA_CTRL_PARM_MAX. The /* N *​/ markers track decimal values. */
+typedef enum {
+ CAMERA_SET_PARM_DISPLAY_INFO,
+ CAMERA_SET_PARM_DIMENSION,
+
+ CAMERA_SET_PARM_ZOOM,
+ CAMERA_SET_PARM_SENSOR_POSITION,
+ CAMERA_SET_PARM_FOCUS_RECT,
+ CAMERA_SET_PARM_LUMA_ADAPTATION,
+ CAMERA_SET_PARM_CONTRAST,
+ CAMERA_SET_PARM_BRIGHTNESS,
+ CAMERA_SET_PARM_EXPOSURE_COMPENSATION,
+ CAMERA_SET_PARM_SHARPNESS,
+ CAMERA_SET_PARM_HUE, /* 10 */
+ CAMERA_SET_PARM_SATURATION,
+ CAMERA_SET_PARM_EXPOSURE,
+ CAMERA_SET_PARM_AUTO_FOCUS,
+ CAMERA_SET_PARM_WB,
+ CAMERA_SET_PARM_EFFECT,
+ CAMERA_SET_PARM_FPS,
+ CAMERA_SET_PARM_FLASH,
+ CAMERA_SET_PARM_NIGHTSHOT_MODE,
+ CAMERA_SET_PARM_REFLECT,
+ CAMERA_SET_PARM_PREVIEW_MODE, /* 20 */
+ CAMERA_SET_PARM_ANTIBANDING,
+ CAMERA_SET_PARM_RED_EYE_REDUCTION,
+ CAMERA_SET_PARM_FOCUS_STEP,
+ CAMERA_SET_PARM_EXPOSURE_METERING,
+ CAMERA_SET_PARM_AUTO_EXPOSURE_MODE,
+ CAMERA_SET_PARM_ISO,
+ CAMERA_SET_PARM_BESTSHOT_MODE,
+ CAMERA_SET_PARM_ENCODE_ROTATION,
+
+ CAMERA_SET_PARM_PREVIEW_FPS,
+ CAMERA_SET_PARM_AF_MODE, /* 30 */
+ CAMERA_SET_PARM_HISTOGRAM,
+ CAMERA_SET_PARM_FLASH_STATE,
+ CAMERA_SET_PARM_FRAME_TIMESTAMP,
+ CAMERA_SET_PARM_STROBE_FLASH,
+ CAMERA_SET_PARM_FPS_LIST,
+ CAMERA_SET_PARM_HJR,
+ CAMERA_SET_PARM_ROLLOFF,
+
+ CAMERA_STOP_PREVIEW,
+ CAMERA_START_PREVIEW,
+ CAMERA_START_SNAPSHOT, /* 40 */
+ CAMERA_START_RAW_SNAPSHOT,
+ CAMERA_STOP_SNAPSHOT,
+ CAMERA_EXIT,
+ CAMERA_ENABLE_BSM,
+ CAMERA_DISABLE_BSM,
+ CAMERA_GET_PARM_ZOOM,
+ CAMERA_GET_PARM_MAXZOOM,
+ CAMERA_GET_PARM_ZOOMRATIOS,
+ CAMERA_GET_PARM_AF_SHARPNESS,
+ CAMERA_SET_PARM_LED_MODE, /* 50 */
+ CAMERA_SET_MOTION_ISO,
+ CAMERA_AUTO_FOCUS_CANCEL,
+ CAMERA_GET_PARM_FOCUS_STEP,
+ CAMERA_ENABLE_AFD,
+ CAMERA_PREPARE_SNAPSHOT,
+ CAMERA_SET_FPS_MODE,
+ CAMERA_START_VIDEO,
+ CAMERA_STOP_VIDEO,
+ CAMERA_START_RECORDING,
+ CAMERA_STOP_RECORDING, /* 60 */
+ CAMERA_SET_VIDEO_DIS_PARAMS,
+ CAMERA_SET_VIDEO_ROT_PARAMS,
+ CAMERA_SET_PARM_AEC_ROI,
+ CAMERA_SET_CAF,
+ CAMERA_SET_PARM_BL_DETECTION_ENABLE,
+ CAMERA_SET_PARM_SNOW_DETECTION_ENABLE,
+ CAMERA_SET_PARM_STROBE_FLASH_MODE,
+ CAMERA_SET_PARM_AF_ROI,
+ CAMERA_START_LIVESHOT,
+ CAMERA_SET_SCE_FACTOR, /* 70 */
+ CAMERA_GET_CAPABILITIES,
+ CAMERA_GET_PARM_DIMENSION,
+ CAMERA_GET_PARM_LED_MODE,
+ CAMERA_SET_PARM_FD,
+ CAMERA_GET_PARM_3D_FRAME_FORMAT,
+ CAMERA_QUERY_FLASH_FOR_SNAPSHOT,
+ CAMERA_GET_PARM_FOCUS_DISTANCES,
+ CAMERA_START_ZSL,
+ CAMERA_STOP_ZSL,
+ CAMERA_ENABLE_ZSL, /* 80 */
+ CAMERA_GET_PARM_FOCAL_LENGTH,
+ CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+ CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+ CAMERA_SET_PARM_WAVELET_DENOISE,
+ CAMERA_SET_PARM_MCE,
+ CAMERA_ENABLE_STEREO_CAM,
+ CAMERA_SET_PARM_RESET_LENS_TO_INFINITY,
+ CAMERA_GET_PARM_SNAPSHOTDATA,
+ CAMERA_SET_PARM_HFR,
+ CAMERA_SET_REDEYE_REDUCTION, /* 90 */
+ CAMERA_SET_PARM_3D_DISPLAY_DISTANCE,
+ CAMERA_SET_PARM_3D_VIEW_ANGLE,
+ CAMERA_SET_PARM_3D_EFFECT,
+ CAMERA_SET_PARM_PREVIEW_FORMAT,
+ CAMERA_GET_PARM_3D_DISPLAY_DISTANCE, /* 95 */
+ CAMERA_GET_PARM_3D_VIEW_ANGLE,
+ CAMERA_GET_PARM_3D_EFFECT,
+ CAMERA_GET_PARM_3D_MANUAL_CONV_RANGE,
+ CAMERA_SET_PARM_3D_MANUAL_CONV_VALUE,
+ CAMERA_ENABLE_3D_MANUAL_CONVERGENCE, /* 100 */
+ CAMERA_SET_PARM_HDR,
+ CAMERA_SET_ASD_ENABLE,
+ CAMERA_POSTPROC_ABORT,
+ CAMERA_SET_AEC_MTR_AREA,
+ CAMERA_SET_AEC_LOCK, /*105*/
+ CAMERA_SET_AWB_LOCK,
+ CAMERA_SET_RECORDING_HINT,
+ CAMERA_SET_PARM_CAF,
+ CAMERA_SET_FULL_LIVESHOT,
+ CAMERA_SET_DIS_ENABLE, /*110*/
+ CAMERA_GET_PARM_MAX_HFR_MODE,
+ CAMERA_SET_LOW_POWER_MODE,
+ CAMERA_GET_PARM_DEF_PREVIEW_SIZES,
+ CAMERA_GET_PARM_DEF_VIDEO_SIZES,
+ CAMERA_GET_PARM_DEF_THUMB_SIZES, /*115*/
+ CAMERA_GET_PARM_DEF_HFR_SIZES,
+ CAMERA_GET_PARM_MAX_LIVESHOT_SIZE,
+ CAMERA_GET_PARM_FPS_RANGE,
+ CAMERA_SET_3A_CONVERGENCE,
+ CAMERA_SET_PREVIEW_HFR, /*120*/
+ CAMERA_GET_MAX_DIMENSION,
+ CAMERA_GET_MAX_NUM_FACES_DECT,
+ CAMERA_SET_CHANNEL_STREAM,
+ CAMERA_GET_CHANNEL_STREAM,
+ CAMERA_SET_PARM_CID, /*125*/
+ CAMERA_GET_PARM_FRAME_RESOLUTION,
+ CAMERA_GET_FACIAL_FEATURE_INFO,
+ CAMERA_SET_CAF_LOCK_CANCEL,
+ CAMERA_GET_PARM_HDR,
+ CAMERA_SET_PARM_CAF_TYPE,
+ CAMERA_GET_PARM_LUX_IDX,
+ CAMERA_GET_PARM_LOW_LIGHT_FOR_ZSL,
+ CAMERA_GET_PARM_AF_STATUS,
+ CAMERA_CHECK_AF_RETRY,
+ CAMERA_SET_LG_CAF_LOCK,
+ CAMERA_SET_INFORM_STARTPREVIEW,
+ CAMERA_CTRL_PARM_MAX
+} cam_ctrl_type;
+
+/* Error codes reported by the camera stack; EFS_* entries mirror the legacy
+ * embedded-file-system error space (see per-entry comments). */
+typedef enum {
+ CAMERA_ERROR_NO_MEMORY,
+ CAMERA_ERROR_EFS_FAIL, /* Low-level operation failed */
+ CAMERA_ERROR_EFS_FILE_OPEN, /* File already opened */
+ CAMERA_ERROR_EFS_FILE_NOT_OPEN, /* File not opened */
+ CAMERA_ERROR_EFS_FILE_ALREADY_EXISTS, /* File already exists */
+ CAMERA_ERROR_EFS_NONEXISTENT_DIR, /* User directory doesn't exist */
+ CAMERA_ERROR_EFS_NONEXISTENT_FILE, /* User directory doesn't exist */
+ CAMERA_ERROR_EFS_BAD_FILE_NAME, /* Client specified invalid file/directory name*/
+ CAMERA_ERROR_EFS_BAD_FILE_HANDLE, /* Client specified invalid file/directory name*/
+ CAMERA_ERROR_EFS_SPACE_EXHAUSTED, /* Out of file system space */
+ CAMERA_ERROR_EFS_OPEN_TABLE_FULL, /* Out of open-file table slots */
+ CAMERA_ERROR_EFS_OTHER_ERROR, /* Other error */
+ CAMERA_ERROR_CONFIG,
+ CAMERA_ERROR_EXIF_ENCODE,
+ CAMERA_ERROR_VIDEO_ENGINE,
+ CAMERA_ERROR_IPL,
+ CAMERA_ERROR_INVALID_FORMAT,
+ CAMERA_ERROR_TIMEOUT,
+ CAMERA_ERROR_ESD,
+ CAMERA_ERROR_MAX
+} camera_error_type;
+
+/* Undefine any previously-defined antibanding constants so the enum below
+ * owns these names (they may exist as macros in older camera headers). */
+#if defined CAMERA_ANTIBANDING_OFF
+#undef CAMERA_ANTIBANDING_OFF
+#endif
+
+#if defined CAMERA_ANTIBANDING_60HZ
+#undef CAMERA_ANTIBANDING_60HZ
+#endif
+
+#if defined CAMERA_ANTIBANDING_50HZ
+#undef CAMERA_ANTIBANDING_50HZ
+#endif
+
+#if defined CAMERA_ANTIBANDING_AUTO
+#undef CAMERA_ANTIBANDING_AUTO
+#endif
+
+/* Flicker-avoidance (antibanding) modes for 50/60 Hz mains lighting. */
+typedef enum {
+ CAMERA_ANTIBANDING_OFF,
+ CAMERA_ANTIBANDING_60HZ,
+ CAMERA_ANTIBANDING_50HZ,
+ CAMERA_ANTIBANDING_AUTO,
+ CAMERA_ANTIBANDING_AUTO_50HZ,
+ CAMERA_ANTIBANDING_AUTO_60HZ,
+ CAMERA_MAX_ANTIBANDING,
+} camera_antibanding_type;
+
+/* Enum Type for different ISO Mode supported */
+typedef enum {
+ CAMERA_ISO_AUTO = 0,
+ CAMERA_ISO_DEBLUR,
+ CAMERA_ISO_100,
+ CAMERA_ISO_200,
+ CAMERA_ISO_400,
+ CAMERA_ISO_800,
+ CAMERA_ISO_1600,
+ CAMERA_ISO_MAX
+} camera_iso_mode_type;
+
+/* Facial-feature processing capabilities that can be queried/enabled. */
+typedef enum {
+ MM_CAMERA_FACIAL_FEATURE_FD, // facial detection
+ MM_CAMERA_FACIAL_FEATURE_MAX
+} camera_facial_features;
+
+/* Enable/disable flag for the AEC region-of-interest feature. */
+typedef enum {
+ AEC_ROI_OFF,
+ AEC_ROI_ON
+} aec_roi_ctrl_t;
+
+/* How the AEC ROI is addressed: by a precomputed index or by coordinate. */
+typedef enum {
+ AEC_ROI_BY_INDEX,
+ AEC_ROI_BY_COORDINATE,
+} aec_roi_type_t;
+
+/* A 2-D pixel coordinate. */
+typedef struct {
+ uint32_t x;
+ uint32_t y;
+} cam_coordinate_type_t;
+
+/*
+ * Define DRAW_RECTANGLES to draw rectangles on screen. Just for test purpose.
+ */
+//#define DRAW_RECTANGLES
+
+/* A rectangle: top-left corner (x, y) with extent (dx, dy). */
+typedef struct {
+ uint16_t x;
+ uint16_t y;
+ uint16_t dx;
+ uint16_t dy;
+} roi_t;
+
+/* AEC ROI configuration: enable flag, addressing mode, and the position
+ * expressed either as a coordinate or as an index (per aec_roi_type). */
+typedef struct {
+ aec_roi_ctrl_t aec_roi_enable;
+ aec_roi_type_t aec_roi_type;
+ union {
+ cam_coordinate_type_t coordinate;
+ uint32_t aec_roi_idx;
+ } aec_roi_position;
+} cam_set_aec_roi_t;
+
+/* A set of up to MAX_ROI rectangles tagged with the frame they apply to. */
+typedef struct {
+ uint32_t frm_id;
+ uint8_t num_roi;
+ roi_t roi[MAX_ROI];
+ uint8_t is_multiwindow;
+} roi_info_t;
+
+/* Exif Tag Data Type */
+/* Values follow the TIFF/Exif field-type encoding (BYTE=1, ASCII=2, ...). */
+typedef enum
+{
+ EXIF_BYTE = 1,
+ EXIF_ASCII = 2,
+ EXIF_SHORT = 3,
+ EXIF_LONG = 4,
+ EXIF_RATIONAL = 5,
+ EXIF_UNDEFINED = 7,
+ EXIF_SLONG = 9,
+ EXIF_SRATIONAL = 10
+} exif_tag_type_t;
+
+
+/* Exif Rational Data Type */
+typedef struct
+{
+ uint32_t num; // Numerator
+ uint32_t denom; // Denominator
+
+} rat_t;
+
+/* Exif Signed Rational Data Type */
+typedef struct
+{
+ int32_t num; // Numerator
+ int32_t denom; // Denominator
+
+} srat_t;
+
+/* One Exif tag value: its type, element count, and the payload.
+ * Pointer union members are used when count > 1; scalar members otherwise.
+ * 'copy' presumably tells the encoder to take its own copy of pointed-to
+ * data -- TODO confirm against the JPEG encoder interface. */
+typedef struct
+{
+ exif_tag_type_t type;
+ uint8_t copy;
+ uint32_t count;
+ union
+ {
+ char *_ascii;
+ uint8_t *_bytes;
+ uint8_t _byte;
+ uint16_t *_shorts;
+ uint16_t _short;
+ uint32_t *_longs;
+ uint32_t _long;
+ rat_t *_rats;
+ rat_t _rat;
+ uint8_t *_undefined;
+ int32_t *_slongs;
+ int32_t _slong;
+ srat_t *_srats;
+ srat_t _srat;
+ } data;
+} exif_tag_entry_t;
+
+/* A tag ID paired with its value, as passed to exif_set_tag(). */
+typedef struct {
+ uint32_t tag_id;
+ exif_tag_entry_t tag_entry;
+} exif_tags_info_t;
+
+
+/* Exposure-bracketing operating mode. */
+typedef enum {
+ HDR_BRACKETING_OFF,
+ HDR_MODE,
+ EXP_BRACKETING_MODE
+ } hdr_mode;
+
+/* HDR / exposure-bracketing configuration. */
+typedef struct {
+ hdr_mode mode;
+ uint32_t hdr_enable;
+ uint32_t total_frames;
+ uint32_t total_hal_frames;
+ char values[MAX_EXP_BRACKETING_LENGTH]; /* user defined values */
+} exp_bracketing_t;
+/* AEC metering areas with per-area weights. */
+typedef struct {
+ roi_t mtr_area[MAX_ROI];
+ uint32_t num_area;
+ int weight[MAX_ROI];
+} aec_mtr_area_t;
+
+/* Wavelet-denoise enable flag and plane-processing selector. */
+typedef struct {
+ int denoise_enable;
+ int process_plates;
+} denoise_param_t;
+
+/* Guarded so this definition coexists with frameworks that already
+ * declare camera_size_type. */
+#ifndef HAVE_CAMERA_SIZE_TYPE
+ #define HAVE_CAMERA_SIZE_TYPE
+struct camera_size_type {
+ int width;
+ int height;
+};
+#endif
+
+/* Buffer layout info: plane offsets, total size, and resolution. */
+typedef struct {
+ uint32_t yoffset;
+ uint32_t cbcr_offset;
+ uint32_t size;
+ struct camera_size_type resolution;
+}cam_buf_info_t;
+
+/* A signed 2-D point. */
+typedef struct {
+ int x;
+ int y;
+}cam_point_t;
+
+/* 3A state captured at snapshot time (AF/AEC/AWB), e.g. for Exif. */
+typedef struct {
+ /* AF parameters */
+ uint8_t focus_position;
+ /* AEC parameters */
+ uint32_t line_count;
+ uint8_t luma_target;
+ /* AWB parameters */
+ int32_t r_gain;
+ int32_t b_gain;
+ int32_t g_gain;
+ uint8_t exposure_mode;
+ uint8_t exposure_program;
+ float exposure_time;
+ uint32_t iso_speed;
+} snapshotData_info_t;
+
+
+/* High-frame-rate modes. NOTE: OFF deliberately starts at 1. */
+typedef enum {
+ CAMERA_HFR_MODE_OFF = 1,
+ CAMERA_HFR_MODE_60FPS,
+ CAMERA_HFR_MODE_90FPS,
+ CAMERA_HFR_MODE_120FPS,
+ CAMERA_HFR_MODE_150FPS,
+} camera_hfr_mode_t;
+
+/* frame Q*/
+/* Singly-linked node for the frame FIFO; 'f' is the opaque frame payload. */
+struct fifo_node
+{
+ struct fifo_node *next;
+ void *f;
+};
+
+/* Mutex/condvar-protected frame FIFO ('mut' guards the list, 'wait'
+ * signals consumers). */
+struct fifo_queue
+{
+ int num_of_frames;
+ struct fifo_node *front;
+ struct fifo_node *back;
+ pthread_mutex_t mut;
+ pthread_cond_t wait;
+ char* name;
+};
+
+/* Shared-memory description for histogram stats buffers. */
+typedef struct {
+ uint32_t buf_len;
+ uint8_t num;
+ uint8_t pmem_type;
+ uint32_t vaddr[8];
+} mm_camera_histo_mem_info_t;
+
+/* Asynchronous control-completion events delivered to the HAL. */
+typedef enum {
+ MM_CAMERA_CTRL_EVT_ZOOM_DONE,
+ MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE,
+ MM_CAMERA_CTRL_EVT_PREP_SNAPSHOT,
+ MM_CAMERA_CTRL_EVT_SNAPSHOT_CONFIG_DONE,
+ MM_CAMERA_CTRL_EVT_WDN_DONE, // wavelet denoise done
+ MM_CAMERA_CTRL_EVT_HDR_DONE,
+ MM_CAMERA_CTRL_EVT_AUTO_FOCUS_MOVE,
+ MM_CAMERA_CTRL_EVT_ERROR,
+ MM_CAMERA_CTRL_EVT_MAX
+}mm_camera_ctrl_event_type_t;
+
+/* A control event with its completion status and caller cookie. */
+typedef struct {
+ mm_camera_ctrl_event_type_t evt;
+ cam_ctrl_status_t status;
+ unsigned long cookie;
+} mm_camera_ctrl_event_t;
+
+/* Per-channel streaming state-change events. */
+typedef enum {
+ MM_CAMERA_CH_EVT_STREAMING_ON,
+ MM_CAMERA_CH_EVT_STREAMING_OFF,
+ MM_CAMERA_CH_EVT_STREAMING_ERR,
+ MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE,
+ MM_CAMERA_CH_EVT_DATA_REQUEST_MORE,
+ MM_CAMERA_CH_EVT_MAX
+}mm_camera_ch_event_type_t;
+
+/* A channel event tagged with the channel it occurred on. */
+typedef struct {
+ uint32_t ch;
+ mm_camera_ch_event_type_t evt;
+} mm_camera_ch_event_t;
+
+/* Histogram-statistics payload (currently only a buffer index). */
+typedef struct {
+ uint32_t index;
+ /* TBD: need more fields for histo stats? */
+} mm_camera_stats_histo_t;
+
+/* A statistics event; 'event_id' selects the union member. */
+typedef struct {
+ uint32_t event_id;
+ union {
+ mm_camera_stats_histo_t stats_histo;
+ } e;
+} mm_camera_stats_event_t;
+
+/* Face-detect ROI messages come in two flavors: a header (count) followed
+ * by per-face data records. */
+typedef enum {
+ FD_ROI_TYPE_HEADER,
+ FD_ROI_TYPE_DATA
+} fd_roi_type_t;
+
+/* Face-detect configuration: mode and max number of faces to report. */
+typedef struct {
+ int fd_mode;
+ int num_fd;
+} fd_set_parm_t;
+
+/* FD header record: frame ID plus how many faces were detected in it. */
+typedef struct {
+ uint32_t frame_id;
+ int16_t num_face_detected;
+} fd_roi_header_type;
+
+/* Face bounding box: top-left (x, y) with extent (dx, dy). */
+struct fd_rect_t {
+ uint16_t x;
+ uint16_t y;
+ uint16_t dx;
+ uint16_t dy;
+};
+
+/* Attributes of one detected face (ranges documented per field). */
+typedef struct {
+ struct fd_rect_t face_boundary;
+ uint16_t left_eye_center[2];
+ uint16_t right_eye_center[2];
+ uint16_t mouth_center[2];
+ uint8_t smile_degree; //0 -100
+ uint8_t smile_confidence; //
+ uint8_t blink_detected; // 0 or 1
+ uint8_t is_face_recognised; // 0 or 1
+ int8_t gaze_angle; // -90 -45 0 45 90 for head left to right tilt
+ int8_t updown_dir; // -90 to 90
+ int8_t leftright_dir; //-90 to 90
+ int8_t roll_dir; // -90 to 90
+ int8_t left_right_gaze; // -50 to 50
+ int8_t top_bottom_gaze; // -50 to 50
+ uint8_t left_blink; // 0 - 100
+ uint8_t right_blink; // 0 - 100
+ int8_t id; // unique id for face tracking within view unless view changes
+ int8_t score; // score of confidence( 0 -100)
+} fd_face_type;
+
+/* FD data record: one face ('idx' of 'frame_id''s detected faces). */
+typedef struct {
+ uint32_t frame_id;
+ uint8_t idx;
+ fd_face_type face;
+} fd_roi_data_type;
+
+/* Tagged union carrying either an FD header or one FD data record. */
+struct fd_roi_t {
+ fd_roi_type_t type;
+ union {
+ fd_roi_header_type hdr;
+ fd_roi_data_type data;
+ } d;
+};
+
+/* An informational event; 'event_id' selects the union member. */
+typedef struct {
+ uint32_t event_id;
+ union {
+ mm_camera_histo_mem_info_t histo_mem_info;
+ struct fd_roi_t roi;
+ } e;
+} mm_camera_info_event_t;
+
+/* Top-level event categories (selector for mm_camera_event_t.e). */
+typedef enum {
+ MM_CAMERA_EVT_TYPE_CH,
+ MM_CAMERA_EVT_TYPE_CTRL,
+ MM_CAMERA_EVT_TYPE_STATS,
+ MM_CAMERA_EVT_TYPE_INFO,
+ MM_CAMERA_EVT_TYPE_MAX
+} mm_camera_event_type_t;
+
+/*
+ * the APP event related defines
+*/
+typedef enum {
+ MM_CAMERA_STATS_EVT_HISTO,
+ MM_CAMERA_STATS_EVT_MAX
+} mm_camera_stats_event_type_t;
+
+typedef enum {
+ MM_CAMERA_INFO_EVT_ROI,
+ MM_CAMERA_INFO_FLASH_FRAME_IDX,
+ MM_CAMERA_INFO_EVT_MAX
+} mm_camera_info_event_type_t;
+
+/* !!WARNING: PLEASE BE VERY CAREFUL!!
+ * v4l2_event payload has a limitation of 64 bytes.
+ * This makes that the whole mm_camera_event_t size
+ * cannot go beyond 64 bytes.
+ */
+typedef struct {
+ mm_camera_event_type_t event_type;
+ /* the union size cannot go beyond 64 bytes. need RFC */
+ union {
+ mm_camera_ch_event_t ch;
+ mm_camera_ctrl_event_t ctrl;
+ mm_camera_stats_event_t stats;
+ mm_camera_info_event_t info;
+ } e;
+} mm_camera_event_t;
+
+/* Auto focus mode, used for CAMERA_PARM_AF_MODE */
+typedef enum {
+ AF_MODE_UNCHANGED = -1,
+ AF_MODE_NORMAL = 0,
+ AF_MODE_MACRO,
+ AF_MODE_AUTO,
+ AF_MODE_CAF,
+ AF_MODE_INFINITY,
+ AF_MODE_MAX
+} isp3a_af_mode_t;
+
+/* Crop windows for the two VFE outputs: inN/outN are input and output
+ * width/height for output path N; update_flag marks the struct dirty. */
+typedef struct {
+ uint32_t in1_w;
+ uint32_t out1_w;
+ uint32_t in1_h;
+ uint32_t out1_h;
+ uint32_t in2_w;
+ uint32_t out2_w;
+ uint32_t in2_h;
+ uint32_t out2_h;
+ uint8_t update_flag;
+} common_crop_t;
+
+/* Flash/LED operating modes. */
+typedef enum {
+ LED_MODE_OFF,
+ LED_MODE_AUTO,
+ LED_MODE_ON,
+ LED_MODE_TORCH,
+
+ /*new mode above should be added above this line*/
+ LED_MODE_MAX
+} led_mode_t;
+
+/* AF actuator state used by the retry logic. */
+typedef struct {
+ int is_checking_af_retry;
+ int is_moving;
+} af_actuator_status_t;
+
+/* AEC info needed when firing the flash for a ZSL capture. */
+typedef struct{
+ int aec_index_for_zsl;
+ int zsl_flash_enable;
+} aec_info_for_flash_t;
+
+typedef struct video_dis_param_ctrl_t {
+ uint32_t dis_enable; /* DIS feature: 1 = enable, 0 = disable.
+ when enable, caller makes sure w/h are 10% more. */
+ uint32_t video_rec_width; /* video frame width for recording */
+ uint32_t video_rec_height; /* video frame height for recording */
+ uint32_t output_cbcr_offset;
+} video_dis_param_ctrl_t;
+
+/* Rotation codes. NOTE(review): the non-contiguous values (180=6, 270=7)
+ * presumably mirror a hardware register encoding -- do not renumber. */
+typedef enum camera_rotation_type {
+ ROT_NONE = 0,
+ ROT_CLOCKWISE_90 = 1,
+ ROT_CLOCKWISE_180 = 6,
+ ROT_CLOCKWISE_270 = 7,
+} camera_rotation_type;
+
+typedef struct video_rotation_param_ctrl_t {
+ camera_rotation_type rotation; /* 0 degree = rot disable. */
+} video_rotation_param_ctrl_t;
+
+/* Indices into focus_distances_info_t.focus_distance[]. */
+enum focus_distance_index{
+ FOCUS_DISTANCE_NEAR_INDEX, /* 0 */
+ FOCUS_DISTANCE_OPTIMAL_INDEX,
+ FOCUS_DISTANCE_FAR_INDEX,
+ FOCUS_DISTANCE_MAX_INDEX
+};
+
+/* Near/optimal/far focus distances plus the gain and exposure time in
+ * effect when they were measured. */
+typedef struct {
+ float focus_distance[FOCUS_DISTANCE_MAX_INDEX];
+ float real_gain;
+ float exp_time;
+} focus_distances_info_t;
+
+/* 3D (stereo) frame packing, aliased from the kernel's msm_st_frame_packing. */
+typedef enum msm_st_frame_packing cam_3d_frame_format_t;
+
+/* A frame type paired with its 3D packing format. */
+typedef struct {
+ cam_frame_type_t frame_type;
+ cam_3d_frame_format_t format;
+}camera_3d_frame_t;
+
+/* Scene ("best shot") presets. */
+typedef enum {
+ CAMERA_BESTSHOT_OFF = 0,
+ CAMERA_BESTSHOT_AUTO = 1,
+ CAMERA_BESTSHOT_LANDSCAPE = 2,
+ CAMERA_BESTSHOT_SNOW,
+ CAMERA_BESTSHOT_BEACH,
+ CAMERA_BESTSHOT_SUNSET,
+ CAMERA_BESTSHOT_NIGHT,
+ CAMERA_BESTSHOT_PORTRAIT,
+ CAMERA_BESTSHOT_BACKLIGHT,
+ CAMERA_BESTSHOT_SPORTS,
+ CAMERA_BESTSHOT_ANTISHAKE,
+ CAMERA_BESTSHOT_FLOWERS,
+ CAMERA_BESTSHOT_CANDLELIGHT,
+ CAMERA_BESTSHOT_FIREWORKS,
+ CAMERA_BESTSHOT_PARTY,
+ CAMERA_BESTSHOT_NIGHT_PORTRAIT,
+ CAMERA_BESTSHOT_THEATRE,
+ CAMERA_BESTSHOT_ACTION,
+ CAMERA_BESTSHOT_AR,
+ CAMERA_BESTSHOT_MAX
+} camera_bestshot_mode_type;
+
+/* AF focus-window selection. NOTE(review): very generic names (AUTO, SPOT,
+ * ...) defined at global scope -- collision-prone; kept for compatibility. */
+typedef enum {
+ AUTO = 1,
+ SPOT,
+ CENTER_WEIGHTED,
+ AVERAGE
+} cam_af_focusrect_t;
+
+/* AEC metering modes. */
+typedef enum {
+ CAMERA_AEC_FRAME_AVERAGE,
+ CAMERA_AEC_CENTER_WEIGHTED,
+ CAMERA_AEC_SPOT_METERING,
+ CAMERA_AEC_SMART_METERING,
+ CAMERA_AEC_USER_METERING,
+ CAMERA_AEC_MAX_MODES
+} camera_auto_exposure_mode_type;
+
+/* Frame-rate control: auto-adjusting vs. fixed FPS. */
+typedef enum {
+ FPS_MODE_AUTO,
+ FPS_MODE_FIXED,
+} fps_mode_t;
+
+/* 256-bin preview luma histogram plus the largest bin value. */
+typedef struct {
+ int32_t buffer[256]; /* buffer to hold data */
+ int32_t max_value;
+} camera_preview_histogram_info;
+
+/* Clockwise */
+typedef enum {
+ CAMERA_ENCODING_ROTATE_0,
+ CAMERA_ENCODING_ROTATE_90,
+ CAMERA_ENCODING_ROTATE_180,
+ CAMERA_ENCODING_ROTATE_270
+} camera_encoding_rotate_t;
+
+/* Motion-ISO (motion-adaptive ISO boost) switch. */
+typedef enum {
+ MOTION_ISO_OFF,
+ MOTION_ISO_ON
+} motion_iso_t;
+
+/* State for the AF control thread: pending ioctl command, device fd,
+ * completion callback, join flag, and the active AF mode. */
+typedef struct {
+ struct msm_ctrl_cmd ctrlCmd;
+ int fd;
+ void (*af_cb)(int8_t );
+ int8_t is_camafctrl_thread_join;
+ isp3a_af_mode_t af_mode;
+} cam_af_ctrl_t;
+
+/* Display */
+/* Display surface size as requested by the user/app layer. */
+typedef struct {
+ uint16_t user_input_display_width;
+ uint16_t user_input_display_height;
+} USER_INPUT_DISPLAY_T;
+
+/* Undefine any previously-defined white-balance constants so the enum
+ * below owns these names (they may exist as macros in older headers). */
+#if defined CAMERA_WB_AUTO
+#undef CAMERA_WB_AUTO
+#endif
+
+#if defined CAMERA_WB_CUSTOM
+#undef CAMERA_WB_CUSTOM
+#endif
+
+#if defined CAMERA_WB_INCANDESCENT
+#undef CAMERA_WB_INCANDESCENT
+#endif
+
+#if defined CAMERA_WB_FLUORESCENT
+#undef CAMERA_WB_FLUORESCENT
+#endif
+
+#if defined CAMERA_WB_DAYLIGHT
+#undef CAMERA_WB_DAYLIGHT
+#endif
+
+#if defined CAMERA_WB_CLOUDY_DAYLIGHT
+#undef CAMERA_WB_CLOUDY_DAYLIGHT
+#endif
+
+#if defined CAMERA_WB_TWILIGHT
+#undef CAMERA_WB_TWILIGHT
+#endif
+
+#if defined CAMERA_WB_SHADE
+#undef CAMERA_WB_SHADE
+#endif
+
+/* White-balance modes; MIN_MINUS_1/MAX_PLUS_1 bracket the valid range
+ * (valid values start at CAMERA_WB_AUTO = 1). */
+typedef enum {
+ CAMERA_WB_MIN_MINUS_1,
+ CAMERA_WB_AUTO = 1,
+ CAMERA_WB_CUSTOM,
+ CAMERA_WB_INCANDESCENT,
+ CAMERA_WB_FLUORESCENT,
+ CAMERA_WB_DAYLIGHT,
+ CAMERA_WB_CLOUDY_DAYLIGHT,
+ CAMERA_WB_TWILIGHT,
+ CAMERA_WB_SHADE,
+ CAMERA_WB_OFF,
+ CAMERA_WB_MAX_PLUS_1
+} config3a_wb_t;
+
+/******************************************************************************
+ * Function: exif_set_tag
+ * Description: Inserts or modifies an Exif tag to the Exif Info object. Typical
+ *              use is to call this function multiple times - to insert all the
+ *              desired Exif Tags individually to the Exif Info object and
+ *              then pass the info object to the Jpeg Encoder object so
+ *              the inserted tags would be emitted as tags in the Exif header.
+ * Input parameters:
+ *   obj - The Exif Info object where the tag would be inserted to or
+ *         modified from.
+ *   tag_id - The Exif Tag ID of the tag to be inserted/modified.
+ *   p_entry - The pointer to the tag entry structure which contains the
+ *             details of tag. The pointer can be set to NULL to un-do
+ *             previous insertion for a certain tag.
+ * Return values:
+ *     JPEGERR_SUCCESS
+ *     JPEGERR_ENULLPTR
+ *     JPEGERR_EFAILED
+ *     (See jpegerr.h for description of error values.)
+ * Notes: none
+ *****************************************************************************/
+int exif_set_tag(exif_info_obj_t obj,
+ exif_tag_id_t tag_id,
+ exif_tag_entry_t *p_entry);
+
+/* Opaque JPEG encoder event code. */
+typedef uint32_t jpeg_event_t;
+
+// Possibly supported color formats
+// Ordering handcrafted for efficient coding, alter with care!
+typedef enum
+{
+ YCRCBLP_H2V2 = 0,
+ YCBCRLP_H2V2 = 1,
+
+ YCRCBLP_H2V1 = 2,
+ YCBCRLP_H2V1 = 3,
+
+ YCRCBLP_H1V2 = 4,
+ YCBCRLP_H1V2 = 5,
+
+ YCRCBLP_H1V1 = 6,
+ YCBCRLP_H1V1 = 7,
+
+ RGB565 = 8,
+ RGB888 = 9,
+ RGBa = 10,
+
+ JPEG_BITSTREAM_H2V2 = 12,
+ JPEG_BITSTREAM_H2V1 = 14,
+ JPEG_BITSTREAM_H1V2 = 16,
+ JPEG_BITSTREAM_H1V1 = 18,
+
+ JPEG_COLOR_FORMAT_MAX,
+
+} jpeg_color_format_t;
+
+/* EXIF header */
+/* =======================================================================
+** Macro Definitions
+** ======================================================================= */
+/* Enum defined to let compiler generate unique offset numbers for different
+ * tags - ordering matters! NOT INTENDED to be used by any application. */
+/* These offsets form the high 16 bits of the EXIFTAGID_* macros via
+ * CONSTRUCT_TAGID below; appending only, never reordering, keeps the
+ * composed tag IDs stable. */
+typedef enum
+{
+ // GPS IFD
+ GPS_VERSION_ID = 0,
+ GPS_LATITUDE_REF,
+ GPS_LATITUDE,
+ GPS_LONGITUDE_REF,
+ GPS_LONGITUDE,
+ GPS_ALTITUDE_REF,
+ GPS_ALTITUDE,
+ GPS_TIMESTAMP,
+ GPS_SATELLITES,
+ GPS_STATUS,
+ GPS_MEASUREMODE,
+ GPS_DOP,
+ GPS_SPEED_REF,
+ GPS_SPEED,
+ GPS_TRACK_REF,
+ GPS_TRACK,
+ GPS_IMGDIRECTION_REF,
+ GPS_IMGDIRECTION,
+ GPS_MAPDATUM,
+ GPS_DESTLATITUDE_REF,
+ GPS_DESTLATITUDE,
+ GPS_DESTLONGITUDE_REF,
+ GPS_DESTLONGITUDE,
+ GPS_DESTBEARING_REF,
+ GPS_DESTBEARING,
+ GPS_DESTDISTANCE_REF,
+ GPS_DESTDISTANCE,
+ GPS_PROCESSINGMETHOD,
+ GPS_AREAINFORMATION,
+ GPS_DATESTAMP,
+ GPS_DIFFERENTIAL,
+
+ // TIFF IFD
+ NEW_SUBFILE_TYPE,
+ SUBFILE_TYPE,
+ IMAGE_WIDTH,
+ IMAGE_LENGTH,
+ BITS_PER_SAMPLE,
+ COMPRESSION,
+ PHOTOMETRIC_INTERPRETATION,
+ THRESH_HOLDING,
+ CELL_WIDTH,
+ CELL_HEIGHT,
+ FILL_ORDER,
+ DOCUMENT_NAME,
+ IMAGE_DESCRIPTION,
+ MAKE,
+ MODEL,
+ STRIP_OFFSETS,
+ ORIENTATION,
+ SAMPLES_PER_PIXEL,
+ ROWS_PER_STRIP,
+ STRIP_BYTE_COUNTS,
+ MIN_SAMPLE_VALUE,
+ MAX_SAMPLE_VALUE,
+ X_RESOLUTION,
+ Y_RESOLUTION,
+ PLANAR_CONFIGURATION,
+ PAGE_NAME,
+ X_POSITION,
+ Y_POSITION,
+ FREE_OFFSET,
+ FREE_BYTE_COUNTS,
+ GRAY_RESPONSE_UNIT,
+ GRAY_RESPONSE_CURVE,
+ T4_OPTION,
+ T6_OPTION,
+ RESOLUTION_UNIT,
+ PAGE_NUMBER,
+ TRANSFER_FUNCTION,
+ SOFTWARE,
+ DATE_TIME,
+ ARTIST,
+ HOST_COMPUTER,
+ PREDICTOR,
+ WHITE_POINT,
+ PRIMARY_CHROMATICITIES,
+ COLOR_MAP,
+ HALFTONE_HINTS,
+ TILE_WIDTH,
+ TILE_LENGTH,
+ TILE_OFFSET,
+ TILE_BYTE_COUNTS,
+ INK_SET,
+ INK_NAMES,
+ NUMBER_OF_INKS,
+ DOT_RANGE,
+ TARGET_PRINTER,
+ EXTRA_SAMPLES,
+ SAMPLE_FORMAT,
+ TRANSFER_RANGE,
+ JPEG_PROC,
+ JPEG_INTERCHANGE_FORMAT,
+ JPEG_INTERCHANGE_FORMAT_LENGTH,
+ JPEG_RESTART_INTERVAL,
+ JPEG_LOSSLESS_PREDICTORS,
+ JPEG_POINT_TRANSFORMS,
+ JPEG_Q_TABLES,
+ JPEG_DC_TABLES,
+ JPEG_AC_TABLES,
+ YCBCR_COEFFICIENTS,
+ YCBCR_SUB_SAMPLING,
+ YCBCR_POSITIONING,
+ REFERENCE_BLACK_WHITE,
+ GAMMA,
+ ICC_PROFILE_DESCRIPTOR,
+ SRGB_RENDERING_INTENT,
+ IMAGE_TITLE,
+ COPYRIGHT,
+ EXIF_IFD,
+ ICC_PROFILE,
+ GPS_IFD,
+
+
+ // TIFF IFD (Thumbnail)
+ TN_IMAGE_WIDTH,
+ TN_IMAGE_LENGTH,
+ TN_BITS_PER_SAMPLE,
+ TN_COMPRESSION,
+ TN_PHOTOMETRIC_INTERPRETATION,
+ TN_IMAGE_DESCRIPTION,
+ TN_MAKE,
+ TN_MODEL,
+ TN_STRIP_OFFSETS,
+ TN_ORIENTATION,
+ TN_SAMPLES_PER_PIXEL,
+ TN_ROWS_PER_STRIP,
+ TN_STRIP_BYTE_COUNTS,
+ TN_X_RESOLUTION,
+ TN_Y_RESOLUTION,
+ TN_PLANAR_CONFIGURATION,
+ TN_RESOLUTION_UNIT,
+ TN_TRANSFER_FUNCTION,
+ TN_SOFTWARE,
+ TN_DATE_TIME,
+ TN_ARTIST,
+ TN_WHITE_POINT,
+ TN_PRIMARY_CHROMATICITIES,
+ TN_JPEGINTERCHANGE_FORMAT,
+ TN_JPEGINTERCHANGE_FORMAT_L,
+ TN_YCBCR_COEFFICIENTS,
+ TN_YCBCR_SUB_SAMPLING,
+ TN_YCBCR_POSITIONING,
+ TN_REFERENCE_BLACK_WHITE,
+ TN_COPYRIGHT,
+
+ // EXIF IFD
+ EXPOSURE_TIME,
+ F_NUMBER,
+ EXPOSURE_PROGRAM,
+ SPECTRAL_SENSITIVITY,
+ ISO_SPEED_RATING,
+ OECF,
+ EXIF_VERSION,
+ EXIF_DATE_TIME_ORIGINAL,
+ EXIF_DATE_TIME_DIGITIZED,
+ EXIF_COMPONENTS_CONFIG,
+ EXIF_COMPRESSED_BITS_PER_PIXEL,
+ SHUTTER_SPEED,
+ APERTURE,
+ BRIGHTNESS,
+ EXPOSURE_BIAS_VALUE,
+ MAX_APERTURE,
+ SUBJECT_DISTANCE,
+ METERING_MODE,
+ LIGHT_SOURCE,
+ FLASH,
+ FOCAL_LENGTH,
+ SUBJECT_AREA,
+ EXIF_MAKER_NOTE,
+ EXIF_USER_COMMENT,
+ SUBSEC_TIME,
+ SUBSEC_TIME_ORIGINAL,
+ SUBSEC_TIME_DIGITIZED,
+ EXIF_FLASHPIX_VERSION,
+ EXIF_COLOR_SPACE,
+ EXIF_PIXEL_X_DIMENSION,
+ EXIF_PIXEL_Y_DIMENSION,
+ RELATED_SOUND_FILE,
+ INTEROP,
+ FLASH_ENERGY,
+ SPATIAL_FREQ_RESPONSE,
+ FOCAL_PLANE_X_RESOLUTION,
+ FOCAL_PLANE_Y_RESOLUTION,
+ FOCAL_PLANE_RESOLUTION_UNIT,
+ SUBJECT_LOCATION,
+ EXPOSURE_INDEX,
+ SENSING_METHOD,
+ FILE_SOURCE,
+ SCENE_TYPE,
+ CFA_PATTERN,
+ CUSTOM_RENDERED,
+ EXPOSURE_MODE,
+ WHITE_BALANCE,
+ DIGITAL_ZOOM_RATIO,
+ FOCAL_LENGTH_35MM,
+ SCENE_CAPTURE_TYPE,
+ GAIN_CONTROL,
+ CONTRAST,
+ SATURATION,
+ SHARPNESS,
+ DEVICE_SETTINGS_DESCRIPTION,
+ SUBJECT_DISTANCE_RANGE,
+ IMAGE_UID,
+ PIM,
+
+ EXIF_TAG_MAX_OFFSET
+
+} exif_tag_offset_t;
+
+/* Below are the supported Tags (ID and structure for their data) */
+/* Compose a 32-bit tag ID: exif_tag_offset_t enum value in the high 16 bits,
+ * the raw Exif tag number in the low 16. Arguments are fully parenthesized
+ * so expression arguments (e.g. a+1) expand correctly. */
+#define CONSTRUCT_TAGID(offset,ID) (((offset) << 16) | (ID))
+
+// GPS tag version
+// Use EXIFTAGTYPE_GPS_VERSION_ID as the exif_tag_type (EXIF_BYTE)
+// Count should be 4
+#define _ID_GPS_VERSION_ID 0x0000
+#define EXIFTAGID_GPS_VERSION_ID CONSTRUCT_TAGID(GPS_VERSION_ID, _ID_GPS_VERSION_ID)
+#define EXIFTAGTYPE_GPS_VERSION_ID EXIF_BYTE
+// North or South Latitude
+// Use EXIFTAGTYPE_GPS_LATITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+#define _ID_GPS_LATITUDE_REF 0x0001
+#define EXIFTAGID_GPS_LATITUDE_REF CONSTRUCT_TAGID(GPS_LATITUDE_REF, _ID_GPS_LATITUDE_REF)
+#define EXIFTAGTYPE_GPS_LATITUDE_REF EXIF_ASCII
+// Latitude
+// Use EXIFTAGTYPE_GPS_LATITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_LATITUDE 0x0002
+#define EXIFTAGID_GPS_LATITUDE CONSTRUCT_TAGID(GPS_LATITUDE, _ID_GPS_LATITUDE)
+#define EXIFTAGTYPE_GPS_LATITUDE EXIF_RATIONAL
+// East or West Longitude
+// Use EXIFTAGTYPE_GPS_LONGITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+#define _ID_GPS_LONGITUDE_REF 0x0003
+#define EXIFTAGID_GPS_LONGITUDE_REF CONSTRUCT_TAGID(GPS_LONGITUDE_REF, _ID_GPS_LONGITUDE_REF)
+#define EXIFTAGTYPE_GPS_LONGITUDE_REF EXIF_ASCII
+// Longitude
+// Use EXIFTAGTYPE_GPS_LONGITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_LONGITUDE 0x0004
+#define EXIFTAGID_GPS_LONGITUDE CONSTRUCT_TAGID(GPS_LONGITUDE, _ID_GPS_LONGITUDE)
+#define EXIFTAGTYPE_GPS_LONGITUDE EXIF_RATIONAL
+// Altitude reference
+// Use EXIFTAGTYPE_GPS_ALTITUDE_REF as the exif_tag_type (EXIF_BYTE)
+#define _ID_GPS_ALTITUDE_REF 0x0005
+#define EXIFTAGID_GPS_ALTITUDE_REF CONSTRUCT_TAGID(GPS_ALTITUDE_REF, _ID_GPS_ALTITUDE_REF)
+#define EXIFTAGTYPE_GPS_ALTITUDE_REF EXIF_BYTE
+// Altitude
+// Use EXIFTAGTYPE_GPS_ALTITUDE as the exif_tag_type (EXIF_RATIONAL)
+#define _ID_GPS_ALTITUDE 0x0006
+#define EXIFTAGID_GPS_ALTITUDE CONSTRUCT_TAGID(GPS_ALTITUDE, _ID_GPS_ALTITUDE)
+// Misspelled name kept for source compatibility with existing callers;
+// prefer the correctly-spelled EXIFTAGTYPE_GPS_ALTITUDE below.
+#define EXIFTAGTYPE_GPS_ALTITUE EXIF_RATIONAL
+#define EXIFTAGTYPE_GPS_ALTITUDE EXIF_RATIONAL
+// GPS time (atomic clock)
+// Use EXIFTAGTYPE_GPS_TIMESTAMP as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_TIMESTAMP 0x0007
+#define EXIFTAGID_GPS_TIMESTAMP CONSTRUCT_TAGID(GPS_TIMESTAMP, _ID_GPS_TIMESTAMP)
+#define EXIFTAGTYPE_GPS_TIMESTAMP EXIF_RATIONAL
+// GPS Satellites
+// Use EXIFTAGTYPE_GPS_SATELLITES as the exif_tag_type (EXIF_ASCII)
+// Count can be anything.
+#define _ID_GPS_SATELLITES 0x0008
+#define EXIFTAGID_GPS_SATELLITES CONSTRUCT_TAGID(GPS_SATELLITES, _ID_GPS_SATELLITES)
+#define EXIFTAGTYPE_GPS_SATELLITES EXIF_ASCII
+// GPS Status
+// Use EXIFTAGTYPE_GPS_STATUS as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "A" - Measurement in progress
+// "V" - Measurement Interoperability
+// Other - Reserved
+#define _ID_GPS_STATUS 0x0009
+#define EXIFTAGID_GPS_STATUS CONSTRUCT_TAGID(GPS_STATUS, _ID_GPS_STATUS)
+// Misspelled name ("EXIFTATTYPE") kept for source compatibility;
+// prefer the correctly-spelled EXIFTAGTYPE_GPS_STATUS below.
+#define EXIFTATTYPE_GPS_STATUS EXIF_ASCII
+#define EXIFTAGTYPE_GPS_STATUS EXIF_ASCII
+// GPS Measure Mode
+// Use EXIFTAGTYPE_GPS_MEASUREMODE as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "2" - 2-dimensional measurement
+// "3" - 3-dimensional measurement
+// Other - Reserved
+#define _ID_GPS_MEASUREMODE 0x000a
+#define EXIFTAGID_GPS_MEASUREMODE CONSTRUCT_TAGID(GPS_MEASUREMODE, _ID_GPS_MEASUREMODE)
+#define EXIFTAGTYPE_GPS_MEASUREMODE EXIF_ASCII
+// GPS Measurement precision (DOP)
+// Use EXIFTAGTYPE_GPS_DOP as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DOP 0x000b
+#define EXIFTAGID_GPS_DOP CONSTRUCT_TAGID(GPS_DOP, _ID_GPS_DOP)
+#define EXIFTAGTYPE_GPS_DOP EXIF_RATIONAL
+// Speed Unit
+// Use EXIFTAGTYPE_GPS_SPEED_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "K" - Kilometers per hour
+// "M" - Miles per hour
+// "N" - Knots
+// Other - Reserved
+#define _ID_GPS_SPEED_REF 0x000c
+#define EXIFTAGID_GPS_SPEED_REF CONSTRUCT_TAGID(GPS_SPEED_REF, _ID_GPS_SPEED_REF)
+#define EXIFTAGTYPE_GPS_SPEED_REF EXIF_ASCII
+// Speed of GPS receiver
+// Use EXIFTAGTYPE_GPS_SPEED as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_SPEED 0x000d
+#define EXIFTAGID_GPS_SPEED CONSTRUCT_TAGID(GPS_SPEED, _ID_GPS_SPEED)
+#define EXIFTAGTYPE_GPS_SPEED EXIF_RATIONAL
+// Reference of direction of movement
+// Use EXIFTAGTYPE_GPS_TRACK_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_TRACK_REF 0x000e
+#define EXIFTAGID_GPS_TRACK_REF CONSTRUCT_TAGID(GPS_TRACK_REF, _ID_GPS_TRACK_REF)
+#define EXIFTAGTYPE_GPS_TRACK_REF EXIF_ASCII
+// Direction of movement
+// Use EXIFTAGTYPE_GPS_TRACK as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_TRACK 0x000f
+#define EXIFTAGID_GPS_TRACK CONSTRUCT_TAGID(GPS_TRACK, _ID_GPS_TRACK)
+#define EXIFTAGTYPE_GPS_TRACK EXIF_RATIONAL
+// Reference of direction of image
+// Use EXIFTAGTYPE_GPS_IMGDIRECTION_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_IMGDIRECTION_REF 0x0010
+#define EXIFTAGID_GPS_IMGDIRECTION_REF CONSTRUCT_TAGID(GPS_IMGDIRECTION_REF, _ID_GPS_IMGDIRECTION_REF)
+#define EXIFTAGTYPE_GPS_IMGDIRECTION_REF EXIF_ASCII
+// Direction of image
+// Use EXIFTAGTYPE_GPS_IMGDIRECTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_IMGDIRECTION 0x0011
+#define EXIFTAGID_GPS_IMGDIRECTION CONSTRUCT_TAGID(GPS_IMGDIRECTION, _ID_GPS_IMGDIRECTION)
+#define EXIFTAGTYPE_GPS_IMGDIRECTION EXIF_RATIONAL
+// Geodetic survey data used
+// Use EXIFTAGTYPE_GPS_MAPDATUM as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_GPS_MAPDATUM 0x0012
+#define EXIFTAGID_GPS_MAPDATUM CONSTRUCT_TAGID(GPS_MAPDATUM, _ID_GPS_MAPDATUM)
+#define EXIFTAGTYPE_GPS_MAPDATUM EXIF_ASCII
+// Reference for latitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLATITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "N" - North latitude
+// "S" - South latitude
+// Other - Reserved
+#define _ID_GPS_DESTLATITUDE_REF 0x0013
+#define EXIFTAGID_GPS_DESTLATITUDE_REF CONSTRUCT_TAGID(GPS_DESTLATITUDE_REF, _ID_GPS_DESTLATITUDE_REF)
+#define EXIFTAGTYPE_GPS_DESTLATITUDE_REF EXIF_ASCII
+// Latitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLATITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_DESTLATITUDE 0x0014
+#define EXIFTAGID_GPS_DESTLATITUDE CONSTRUCT_TAGID(GPS_DESTLATITUDE, _ID_GPS_DESTLATITUDE)
+#define EXIFTAGTYPE_GPS_DESTLATITUDE EXIF_RATIONAL
+// Reference for longitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLONGITUDE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "E" - East longitude
+// "W" - West longitude
+// Other - Reserved
+#define _ID_GPS_DESTLONGITUDE_REF 0x0015
+#define EXIFTAGID_GPS_DESTLONGITUDE_REF CONSTRUCT_TAGID(GPS_DESTLONGITUDE_REF, _ID_GPS_DESTLONGITUDE_REF)
+#define EXIFTAGTYPE_GPS_DESTLONGITUDE_REF EXIF_ASCII
+// Longitude of destination
+// Use EXIFTAGTYPE_GPS_DESTLONGITUDE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_GPS_DESTLONGITUDE 0x0016
+#define EXIFTAGID_GPS_DESTLONGITUDE CONSTRUCT_TAGID(GPS_DESTLONGITUDE, _ID_GPS_DESTLONGITUDE)
+#define EXIFTAGTYPE_GPS_DESTLONGITUDE EXIF_RATIONAL
+// Reference for bearing of destination
+// Use EXIFTAGTYPE_GPS_DESTBEARING_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "T" - True direction
+// "M" - Magnetic direction
+// Other - Reserved
+#define _ID_GPS_DESTBEARING_REF 0x0017
+#define EXIFTAGID_GPS_DESTBEARING_REF CONSTRUCT_TAGID(GPS_DESTBEARING_REF, _ID_GPS_DESTBEARING_REF)
+#define EXIFTAGTYPE_GPS_DESTBEARING_REF EXIF_ASCII
+// Bearing of destination
+// Use EXIFTAGTYPE_GPS_DESTBEARING as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DESTBEARING 0x0018
+#define EXIFTAGID_GPS_DESTBEARING CONSTRUCT_TAGID(GPS_DESTBEARING, _ID_GPS_DESTBEARING)
+#define EXIFTAGTYPE_GPS_DESTBEARING EXIF_RATIONAL
+// Reference for distance to destination
+// Use EXIFTAGTYPE_GPS_DESTDISTANCE_REF as the exif_tag_type (EXIF_ASCII)
+// It should be 2 characters long including the null-terminating character.
+// "K" - Kilometers
+// "M" - Miles
+// "N" - Nautical miles (knots)
+// Other - Reserved
+#define _ID_GPS_DESTDISTANCE_REF 0x0019
+#define EXIFTAGID_GPS_DESTDISTANCE_REF CONSTRUCT_TAGID(GPS_DESTDISTANCE_REF, _ID_GPS_DESTDISTANCE_REF)
+#define EXIFTAGTYPE_GPS_DESTDISTANCE_REF EXIF_ASCII
+// Distance to destination
+// Use EXIFTAGTYPE_GPS_DESTDISTANCE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_GPS_DESTDISTANCE 0x001a
+#define EXIFTAGID_GPS_DESTDISTANCE CONSTRUCT_TAGID(GPS_DESTDISTANCE, _ID_GPS_DESTDISTANCE)
+#define EXIFTAGTYPE_GPS_DESTDISTANCE EXIF_RATIONAL
+// Name of GPS processing method
+// Use EXIFTAGTYPE_GPS_PROCESSINGMETHOD as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_GPS_PROCESSINGMETHOD 0x001b
+#define EXIFTAGID_GPS_PROCESSINGMETHOD CONSTRUCT_TAGID(GPS_PROCESSINGMETHOD, _ID_GPS_PROCESSINGMETHOD)
+#define EXIFTAGTYPE_GPS_PROCESSINGMETHOD EXIF_UNDEFINED
+// Name of GPS area
+// Use EXIFTAGTYPE_GPS_AREAINFORMATION as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_GPS_AREAINFORMATION 0x001c
+#define EXIFTAGID_GPS_AREAINFORMATION CONSTRUCT_TAGID(GPS_AREAINFORMATION, _ID_GPS_AREAINFORMATION)
+#define EXIFTAGTYPE_GPS_AREAINFORMATION EXIF_UNDEFINED
+// GPS date
+// Use EXIFTAGTYPE_GPS_DATESTAMP as the exif_tag_type (EXIF_ASCII)
+// It should be 11 characters long including the null-terminating character.
+#define _ID_GPS_DATESTAMP 0x001d
+#define EXIFTAGID_GPS_DATESTAMP CONSTRUCT_TAGID(GPS_DATESTAMP, _ID_GPS_DATESTAMP)
+#define EXIFTAGTYPE_GPS_DATESTAMP EXIF_ASCII
+// GPS differential correction
+// Use EXIFTAGTYPE_GPS_DIFFERENTIAL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+// 0 - Measurement without differential correction
+// 1 - Differential correction applied
+// Other - Reserved
+#define _ID_GPS_DIFFERENTIAL 0x001e
+#define EXIFTAGID_GPS_DIFFERENTIAL CONSTRUCT_TAGID(GPS_DIFFERENTIAL, _ID_GPS_DIFFERENTIAL)
+#define EXIFTAGTYPE_GPS_DIFFERENTIAL EXIF_SHORT
+// Image width
+// Use EXIFTAGTYPE_IMAGE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_IMAGE_WIDTH 0x0100
+#define EXIFTAGID_IMAGE_WIDTH CONSTRUCT_TAGID(IMAGE_WIDTH, _ID_IMAGE_WIDTH)
+#define EXIFTAGTYPE_IMAGE_WIDTH EXIF_LONG
+// Image height
+// Use EXIFTAGTYPE_IMAGE_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_IMAGE_LENGTH 0x0101
+#define EXIFTAGID_IMAGE_LENGTH CONSTRUCT_TAGID(IMAGE_LENGTH, _ID_IMAGE_LENGTH)
+#define EXIFTAGTYPE_IMAGE_LENGTH EXIF_LONG
+// Number of bits per component
+// Use EXIFTAGTYPE_BITS_PER_SAMPLE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_BITS_PER_SAMPLE 0x0102
+#define EXIFTAGID_BITS_PER_SAMPLE CONSTRUCT_TAGID(BITS_PER_SAMPLE, _ID_BITS_PER_SAMPLE)
+#define EXIFTAGTYPE_BITS_PER_SAMPLE EXIF_SHORT
+// Compression scheme
+// Use EXIFTAGTYPE_COMPRESSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_COMPRESSION 0x0103
+#define EXIFTAGID_COMPRESSION CONSTRUCT_TAGID(COMPRESSION, _ID_COMPRESSION)
+#define EXIFTAGTYPE_COMPRESSION EXIF_SHORT
+// Pixel composition
+// Use EXIFTAGTYPE_PHOTOMETRIC_INTERPRETATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PHOTOMETRIC_INTERPRETATION 0x0106
+#define EXIFTAGID_PHOTOMETRIC_INTERPRETATION CONSTRUCT_TAGID(PHOTOMETRIC_INTERPRETATION, _ID_PHOTOMETRIC_INTERPRETATION)
+#define EXIFTAGTYPE_PHOTOMETRIC_INTERPRETATION EXIF_SHORT
+
+// Thresholding
+// Use EXIFTAGTYPE_THRESH_HOLDING as the exif_tag_type (EXIF_SHORT)
+//
+//1 = No dithering or halftoning
+//2 = Ordered dither or halftone
+//3 = Randomized dither
+#define _ID_THRESH_HOLDING 0x0107
+#define EXIFTAGID_THRESH_HOLDING CONSTRUCT_TAGID(THRESH_HOLDING, _ID_THRESH_HOLDING)
+#define EXIFTAGTYPE_THRESH_HOLDING EXIF_SHORT
+
+// Cell Width
+// Use EXIFTAGTYPE_CELL_WIDTH as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_CELL_WIDTH 0x0108
+#define EXIFTAGID_CELL_WIDTH CONSTRUCT_TAGID(CELL_WIDTH, _ID_CELL_WIDTH)
+#define EXIFTAGTYPE_CELL_WIDTH EXIF_SHORT
+// Cell Height
+// Use EXIFTAGTYPE_CELL_HEIGHT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CELL_HEIGHT 0x0109
+#define EXIFTAGID_CELL_HEIGHT CONSTRUCT_TAGID(CELL_HEIGHT, _ID_CELL_HEIGHT)
+#define EXIFTAGTYPE_CELL_HEIGHT EXIF_SHORT
+// Fill Order
+// Use EXIFTAGTYPE_FILL_ORDER as the exif_tag_type (EXIF_SHORT)
+// 1 = Normal
+// 2 = Reversed
+#define _ID_FILL_ORDER 0x010A
+#define EXIFTAGID_FILL_ORDER CONSTRUCT_TAGID(FILL_ORDER, _ID_FILL_ORDER)
+#define EXIFTAGTYPE_FILL_ORDER EXIF_SHORT
+
+// DOCUMENT NAME
+// Use EXIFTAGTYPE_DOCUMENT_NAME as the exif_tag_type (EXIF_ASCII)
+//
+#define _ID_DOCUMENT_NAME 0x010D
+#define EXIFTAGID_DOCUMENT_NAME CONSTRUCT_TAGID(DOCUMENT_NAME, _ID_DOCUMENT_NAME)
+#define EXIFTAGTYPE_DOCUMENT_NAME EXIF_ASCII
+
+// Image title
+// Use EXIFTAGTYPE_IMAGE_DESCRIPTION as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_IMAGE_DESCRIPTION 0x010e
+#define EXIFTAGID_IMAGE_DESCRIPTION CONSTRUCT_TAGID(IMAGE_DESCRIPTION, _ID_IMAGE_DESCRIPTION)
+#define EXIFTAGTYPE_IMAGE_DESCRIPTION EXIF_ASCII
+// Image input equipment manufacturer
+// Use EXIFTAGTYPE_MAKE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_MAKE 0x010f
+#define EXIFTAGID_MAKE CONSTRUCT_TAGID(MAKE, _ID_MAKE)
+#define EXIFTAGTYPE_MAKE EXIF_ASCII
+// Image input equipment model
+// Use EXIFTAGTYPE_MODEL as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_MODEL 0x0110
+#define EXIFTAGID_MODEL CONSTRUCT_TAGID(MODEL, _ID_MODEL)
+#define EXIFTAGTYPE_MODEL EXIF_ASCII
+// Image data location
+// Use EXIFTAGTYPE_STRIP_OFFSETS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage when PlanarConfiguration = 1
+// = SamplesPerPixel * StripsPerImage when PlanarConfiguration = 2
+#define _ID_STRIP_OFFSETS 0x0111
+#define EXIFTAGID_STRIP_OFFSETS CONSTRUCT_TAGID(STRIP_OFFSETS, _ID_STRIP_OFFSETS)
+#define EXIFTAGTYPE_STRIP_OFFSETS EXIF_LONG
+// Orientation of image
+// Use EXIFTAGTYPE_ORIENTATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_ORIENTATION 0x0112
+#define EXIFTAGID_ORIENTATION CONSTRUCT_TAGID(ORIENTATION, _ID_ORIENTATION)
+#define EXIFTAGTYPE_ORIENTATION EXIF_SHORT
+// Number of components
+// Use EXIFTAGTYPE_SAMPLES_PER_PIXEL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SAMPLES_PER_PIXEL 0x0115
+#define EXIFTAGID_SAMPLES_PER_PIXEL CONSTRUCT_TAGID(SAMPLES_PER_PIXEL, _ID_SAMPLES_PER_PIXEL)
+#define EXIFTAGTYPE_SAMPLES_PER_PIXEL EXIF_SHORT
+// Number of rows per strip
+// Use EXIFTAGTYPE_ROWS_PER_STRIP as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_ROWS_PER_STRIP 0x0116
+#define EXIFTAGID_ROWS_PER_STRIP CONSTRUCT_TAGID(ROWS_PER_STRIP, _ID_ROWS_PER_STRIP)
+#define EXIFTAGTYPE_ROWS_PER_STRIP EXIF_LONG
+// Bytes per compressed strip
+// Use EXIFTAGTYPE_STRIP_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage when PlanarConfiguration = 1
+// = SamplesPerPixel * StripsPerImage when PlanarConfiguration = 2
+#define _ID_STRIP_BYTE_COUNTS 0x0117
+#define EXIFTAGID_STRIP_BYTE_COUNTS CONSTRUCT_TAGID(STRIP_BYTE_COUNTS, _ID_STRIP_BYTE_COUNTS)
+#define EXIFTAGTYPE_STRIP_BYTE_COUNTS EXIF_LONG
+// MinSampleValue
+// Use EXIFTAGTYPE_MIN_SAMPLE_VALUE as the exif_tag_type (EXIF_SHORT)
+#define _ID_MIN_SAMPLE_VALUE 0x0118
+#define EXIFTAGID_MIN_SAMPLE_VALUE CONSTRUCT_TAGID(MIN_SAMPLE_VALUE, _ID_MIN_SAMPLE_VALUE)
+#define EXIFTAGTYPE_MIN_SAMPLE_VALUE EXIF_SHORT
+// MaxSampleValue
+// Use EXIFTAGTYPE_MAX_SAMPLE_VALUE as the exif_tag_type (EXIF_SHORT)
+#define _ID_MAX_SAMPLE_VALUE 0x0119
+#define EXIFTAGID_MAX_SAMPLE_VALUE CONSTRUCT_TAGID(MAX_SAMPLE_VALUE, _ID_MAX_SAMPLE_VALUE)
+#define EXIFTAGTYPE_MAX_SAMPLE_VALUE EXIF_SHORT
+
+// Image resolution in width direction
+// Use EXIFTAGTYPE_X_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_X_RESOLUTION 0x011a
+#define EXIFTAGID_X_RESOLUTION CONSTRUCT_TAGID(X_RESOLUTION, _ID_X_RESOLUTION)
+#define EXIFTAGTYPE_X_RESOLUTION EXIF_RATIONAL
+// Image resolution in height direction
+// Use EXIFTAGTYPE_Y_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_Y_RESOLUTION 0x011b
+#define EXIFTAGID_Y_RESOLUTION CONSTRUCT_TAGID(Y_RESOLUTION, _ID_Y_RESOLUTION)
+#define EXIFTAGTYPE_Y_RESOLUTION EXIF_RATIONAL
+// Image data arrangement
+// Use EXIFTAGTYPE_PLANAR_CONFIGURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PLANAR_CONFIGURATION 0x011c
+#define EXIFTAGID_PLANAR_CONFIGURATION CONSTRUCT_TAGID(PLANAR_CONFIGURATION, _ID_PLANAR_CONFIGURATION)
+#define EXIFTAGTYPE_PLANAR_CONFIGURATION EXIF_SHORT
+// PageName
+// Use EXIFTAGTYPE_PAGE_NAME as the exif_tag_type (EXIF_ASCII)
+// Count should be 1
+#define _ID_PAGE_NAME 0x011d
+#define EXIFTAGID_PAGE_NAME CONSTRUCT_TAGID(PAGE_NAME, _ID_PAGE_NAME)
+#define EXIFTAGTYPE_PAGE_NAME EXIF_ASCII
+// XPosition
+// Use EXIFTAGTYPE_X_POSITION as the exif_tag_type (EXIF_RATIONAL)
+//
+#define _ID_X_POSITION 0x011e
+#define EXIFTAGID_X_POSITION CONSTRUCT_TAGID(X_POSITION, _ID_X_POSITION)
+#define EXIFTAGTYPE_X_POSITION EXIF_RATIONAL
+// YPosition
+// Use EXIFTAGTYPE_Y_POSITION as the exif_tag_type (EXIF_RATIONAL)
+//
+#define _ID_Y_POSITION 0x011f
+#define EXIFTAGID_Y_POSITION CONSTRUCT_TAGID(Y_POSITION, _ID_Y_POSITION)
+#define EXIFTAGTYPE_Y_POSITION EXIF_RATIONAL
+
+// FREE_OFFSET
+// Use EXIFTAGTYPE_FREE_OFFSET as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_FREE_OFFSET 0x0120
+#define EXIFTAGID_FREE_OFFSET CONSTRUCT_TAGID(FREE_OFFSET, _ID_FREE_OFFSET)
+#define EXIFTAGTYPE_FREE_OFFSET EXIF_LONG
+// FREE_BYTE_COUNTS
+// Use EXIFTAGTYPE_FREE_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_FREE_BYTE_COUNTS 0x0121
+#define EXIFTAGID_FREE_BYTE_COUNTS CONSTRUCT_TAGID(FREE_BYTE_COUNTS, _ID_FREE_BYTE_COUNTS)
+#define EXIFTAGTYPE_FREE_BYTE_COUNTS EXIF_LONG
+
+// GrayResponseUnit
+// Use EXIFTAGTYPE_GRAY_RESPONSE_UNIT as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_GRAY_RESPONSE_UNIT 0x0122
+#define EXIFTAGID_GRAY_RESPONSE_UNIT CONSTRUCT_TAGID(GRAY_RESPONSE_UNIT, _ID_GRAY_RESPONSE_UNIT)
+#define EXIFTAGTYPE_GRAY_RESPONSE_UNIT EXIF_SHORT
+// GrayResponseCurve
+// Use EXIFTAGTYPE_GRAY_RESPONSE_CURVE as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_GRAY_RESPONSE_CURVE 0x0123
+#define EXIFTAGID_GRAY_RESPONSE_CURVE CONSTRUCT_TAGID(GRAY_RESPONSE_CURVE , _ID_GRAY_RESPONSE_CURVE )
+#define EXIFTAGTYPE_GRAY_RESPONSE_CURVE EXIF_SHORT
+
+// T4_OPTION
+// Use EXIFTAGTYPE_T4_OPTION as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_T4_OPTION 0x0124
+#define EXIFTAGID_T4_OPTION CONSTRUCT_TAGID(T4_OPTION, _ID_T4_OPTION)
+#define EXIFTAGTYPE_T4_OPTION EXIF_LONG
+// T6_OPTION
+// Use EXIFTAGTYPE_T6_OPTION as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_T6_OPTION 0x0125
+#define EXIFTAGID_T6_OPTION CONSTRUCT_TAGID(T6_OPTION, _ID_T6_OPTION)
+#define EXIFTAGTYPE_T6_OPTION EXIF_LONG
+
+// Unit of X and Y resolution
+// Use EXIFTAGTYPE_RESOLUTION_UNIT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_RESOLUTION_UNIT 0x0128
+#define EXIFTAGID_RESOLUTION_UNIT CONSTRUCT_TAGID(RESOLUTION_UNIT, _ID_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_RESOLUTION_UNIT EXIF_SHORT
+
+// Page Number
+// Use EXIFTAGTYPE_PAGE_NUMBER as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_PAGE_NUMBER 0x0129
+#define EXIFTAGID_PAGE_NUMBER CONSTRUCT_TAGID(PAGE_NUMBER, _ID_PAGE_NUMBER)
+#define EXIFTAGTYPE_PAGE_NUMBER EXIF_SHORT
+// Transfer function
+// Use EXIFTAGTYPE_TRANSFER_FUNCTION as the exif_tag_type (EXIF_SHORT)
+// Count should be 3*256
+#define _ID_TRANSFER_FUNCTION 0x012d
+#define EXIFTAGID_TRANSFER_FUNCTION CONSTRUCT_TAGID(TRANSFER_FUNCTION, _ID_TRANSFER_FUNCTION)
+#define EXIFTAGTYPE_TRANSFER_FUNCTION EXIF_SHORT
+// Software used
+// Use EXIFTAGTYPE_SOFTWARE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_SOFTWARE 0x0131
+#define EXIFTAGID_SOFTWARE CONSTRUCT_TAGID(SOFTWARE, _ID_SOFTWARE)
+#define EXIFTAGTYPE_SOFTWARE EXIF_ASCII
+// File change date and time
+// Use EXIFTAGTYPE_DATE_TIME as the exif_tag_type (EXIF_ASCII)
+// Count should be 20
+#define _ID_DATE_TIME 0x0132
+#define EXIFTAGID_DATE_TIME CONSTRUCT_TAGID(DATE_TIME, _ID_DATE_TIME)
+#define EXIFTAGTYPE_DATE_TIME EXIF_ASCII
+// ARTIST, person who created this image
+// Use EXIFTAGTYPE_ARTIST as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_ARTIST 0x013b
+#define EXIFTAGID_ARTIST CONSTRUCT_TAGID(ARTIST, _ID_ARTIST)
+#define EXIFTAGTYPE_ARTIST EXIF_ASCII
+// Host Computer Name
+// Use EXIFTAGTYPE_HOST_COMPUTER as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_HOST_COMPUTER 0x013c
+#define EXIFTAGID_HOST_COMPUTER CONSTRUCT_TAGID(HOST_COMPUTER , _ID_HOST_COMPUTER )
+#define EXIFTAGTYPE_HOST_COMPUTER EXIF_ASCII
+// Predictor
+// Use EXIFTAGTYPE_PREDICTOR as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_PREDICTOR 0x013d
+#define EXIFTAGID_PREDICTOR CONSTRUCT_TAGID(PREDICTOR , _ID_PREDICTOR )
+#define EXIFTAGTYPE_PREDICTOR EXIF_SHORT
+// White point chromaticity
+// Use EXIFTAGTYPE_WHITE_POINT as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 2
+#define _ID_WHITE_POINT 0x013e
+#define EXIFTAGID_WHITE_POINT CONSTRUCT_TAGID(WHITE_POINT, _ID_WHITE_POINT)
+#define EXIFTAGTYPE_WHITE_POINT EXIF_RATIONAL
+// Chromaticities of primaries
+// Use EXIFTAGTYPE_PRIMARY_CHROMATICITIES as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_PRIMARY_CHROMATICITIES 0x013f
+#define EXIFTAGID_PRIMARY_CHROMATICITIES CONSTRUCT_TAGID(PRIMARY_CHROMATICITIES, _ID_PRIMARY_CHROMATICITIES)
+#define EXIFTAGTYPE_PRIMARY_CHROMATICITIES EXIF_RATIONAL
+
+// COLOR_MAP
+// Use EXIFTAGTYPE_COLOR_MAP as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_COLOR_MAP 0x0140
+#define EXIFTAGID_COLOR_MAP CONSTRUCT_TAGID(COLOR_MAP, _ID_COLOR_MAP)
+#define EXIFTAGTYPE_COLOR_MAP EXIF_SHORT
+// HALFTONE_HINTS
+// Use EXIFTAGTYPE_HALFTONE_HINTS as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_HALFTONE_HINTS 0x0141
+#define EXIFTAGID_HALFTONE_HINTS CONSTRUCT_TAGID(HALFTONE_HINTS, _ID_HALFTONE_HINTS)
+#define EXIFTAGTYPE_HALFTONE_HINTS EXIF_SHORT
+
+// TILE_WIDTH
+// Use EXIFTAGTYPE_TILE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 6
+#define _ID_TILE_WIDTH 0x0142
+#define EXIFTAGID_TILE_WIDTH CONSTRUCT_TAGID(TILE_WIDTH, _ID_TILE_WIDTH)
+#define EXIFTAGTYPE_TILE_WIDTH EXIF_LONG
+// TILE_LENGTH
+// Use EXIFTAGTYPE_TILE_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count should be 6
+#define _ID_TILE_LENGTH 0x0143
+#define EXIFTAGID_TILE_LENGTH CONSTRUCT_TAGID(TILE_LENGTH , _ID_TILE_LENGTH )
+#define EXIFTAGTYPE_TILE_LENGTH EXIF_LONG
+// TILE_OFFSET
+// Use EXIFTAGTYPE_TILE_OFFSET as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_TILE_OFFSET 0x0144
+#define EXIFTAGID_TILE_OFFSET CONSTRUCT_TAGID(TILE_OFFSET , _ID_TILE_OFFSET )
+#define EXIFTAGTYPE_TILE_OFFSET EXIF_LONG
+// tile Byte Counts
+// Use EXIFTAGTYPE_TILE_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+//
+#define _ID_TILE_BYTE_COUNTS 0x0145
+#define EXIFTAGID_TILE_BYTE_COUNTS CONSTRUCT_TAGID(TILE_BYTE_COUNTS , _ID_TILE_BYTE_COUNTS )
+#define EXIFTAGTYPE_TILE_BYTE_COUNTS EXIF_LONG
+
+// INK_SET
+// Use EXIFTAGTYPE_INK_SET as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_INK_SET 0x014c
+#define EXIFTAGID_INK_SET CONSTRUCT_TAGID(INK_SET , _ID_INK_SET )
+#define EXIFTAGTYPE_INK_SET EXIF_SHORT
+// INK_NAMES
+// Use EXIFTAGTYPE_INK_NAMES as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_INK_NAMES 0x014D
+#define EXIFTAGID_INK_NAMES CONSTRUCT_TAGID(INK_NAMES , _ID_INK_NAMES)
+#define EXIFTAGTYPE_INK_NAMES EXIF_ASCII
+// NUMBER_OF_INKS
+// Use EXIFTAGTYPE_NUMBER_OF_INKS as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_NUMBER_OF_INKS 0x014e
+#define EXIFTAGID_NUMBER_OF_INKS CONSTRUCT_TAGID(NUMBER_OF_INKS , _ID_NUMBER_OF_INKS )
+#define EXIFTAGTYPE_NUMBER_OF_INKS EXIF_SHORT
+
+// DOT_RANGE
+// Use EXIFTAGTYPE_DOT_RANGE as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_DOT_RANGE 0x0150
+#define EXIFTAGID_DOT_RANGE CONSTRUCT_TAGID(DOT_RANGE , _ID_DOT_RANGE )
+#define EXIFTAGTYPE_DOT_RANGE EXIF_ASCII
+
+// TARGET_PRINTER
+// Use EXIFTAGTYPE_TARGET_PRINTER as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_TARGET_PRINTER 0x0151
+#define EXIFTAGID_TARGET_PRINTER CONSTRUCT_TAGID(TARGET_PRINTER , _ID_TARGET_PRINTER)
+#define EXIFTAGTYPE_TARGET_PRINTER EXIF_ASCII
+// EXTRA_SAMPLES
+// Use EXIFTAGTYPE_EXTRA_SAMPLES as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_EXTRA_SAMPLES 0x0152
+#define EXIFTAGID_EXTRA_SAMPLES CONSTRUCT_TAGID(EXTRA_SAMPLES , _ID_EXTRA_SAMPLES )
+#define EXIFTAGTYPE_EXTRA_SAMPLES EXIF_SHORT
+
+// SAMPLE_FORMAT
+// Use EXIFTAGTYPE_SAMPLE_FORMAT as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_SAMPLE_FORMAT 0x0153
+#define EXIFTAGID_SAMPLE_FORMAT CONSTRUCT_TAGID(SAMPLE_FORMAT , _ID_SAMPLE_FORMAT )
+#define EXIFTAGTYPE_SAMPLE_FORMAT EXIF_SHORT
+
+// Table of values that extends the range of the transfer function.
+// Use EXIFTAGTYPE_TRANSFER_RANGE as the exif_tag_type (EXIF_SHORT)
+// Count should be 6
+#define _ID_TRANSFER_RANGE 0x0156
+#define EXIFTAGID_TRANSFER_RANGE CONSTRUCT_TAGID(TRANSFER_RANGE , _ID_TRANSFER_RANGE )
+#define EXIFTAGTYPE_TRANSFER_RANGE EXIF_SHORT
+
+// JPEG compression process.
+// Use EXIFTAGTYPE_JPEG_PROC as the exif_tag_type (EXIF_SHORT)
+//
+#define _ID_JPEG_PROC 0x0200
+#define EXIFTAGID_JPEG_PROC CONSTRUCT_TAGID(JPEG_PROC , _ID_JPEG_PROC )
+#define EXIFTAGTYPE_JPEG_PROC EXIF_SHORT
+
+
+// Offset to JPEG SOI
+// Use EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_INTERCHANGE_FORMAT 0x0201
+#define EXIFTAGID_JPEG_INTERCHANGE_FORMAT CONSTRUCT_TAGID(JPEG_INTERCHANGE_FORMAT, _ID_JPEG_INTERCHANGE_FORMAT)
+#define EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT EXIF_LONG
+// Bytes of JPEG data
+// Use EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_INTERCHANGE_FORMAT_LENGTH 0x0202
+#define EXIFTAGID_JPEG_INTERCHANGE_FORMAT_LENGTH CONSTRUCT_TAGID(JPEG_INTERCHANGE_FORMAT_LENGTH, _ID_JPEG_INTERCHANGE_FORMAT_LENGTH)
+#define EXIFTAGTYPE_JPEG_INTERCHANGE_FORMAT_LENGTH EXIF_LONG
+
+// Length of the restart interval.
+// Use EXIFTAGTYPE_JPEG_RESTART_INTERVAL as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_RESTART_INTERVAL 0x0203
+#define EXIFTAGID_JPEG_RESTART_INTERVAL CONSTRUCT_TAGID(JPEG_RESTART_INTERVAL, _ID_JPEG_RESTART_INTERVAL)
+#define EXIFTAGTYPE_JPEG_RESTART_INTERVAL EXIF_SHORT
+
+// JPEGLosslessPredictors
+// Use EXIFTAGTYPE_JPEG_LOSSLESS_PREDICTORS as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_LOSSLESS_PREDICTORS 0x0205
+#define EXIFTAGID_JPEG_LOSSLESS_PREDICTORS CONSTRUCT_TAGID(JPEG_LOSSLESS_PREDICTORS, _ID_JPEG_LOSSLESS_PREDICTORS)
+#define EXIFTAGTYPE_JPEG_LOSSLESS_PREDICTORS EXIF_SHORT
+
+// JPEGPointTransforms
+// Use EXIFTAGTYPE_JPEG_POINT_TRANSFORMS as the exif_tag_type (EXIF_SHORT)
+// Count is undefined
+#define _ID_JPEG_POINT_TRANSFORMS 0x0206
+#define EXIFTAGID_JPEG_POINT_TRANSFORMS CONSTRUCT_TAGID(JPEG_POINT_TRANSFORMS, _ID_JPEG_POINT_TRANSFORMS)
+#define EXIFTAGTYPE_JPEG_POINT_TRANSFORMS EXIF_SHORT
+
+// JPEG_Q_TABLES
+// Use EXIFTAGTYPE_JPEG_Q_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_Q_TABLES 0x0207
+#define EXIFTAGID_JPEG_Q_TABLES CONSTRUCT_TAGID(JPEG_Q_TABLES, _ID_JPEG_Q_TABLES)
+#define EXIFTAGTYPE_JPEG_Q_TABLES EXIF_LONG
+// JPEG_DC_TABLES
+// Use EXIFTAGTYPE_JPEG_DC_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_DC_TABLES 0x0208
+#define EXIFTAGID_JPEG_DC_TABLES CONSTRUCT_TAGID(JPEG_DC_TABLES, _ID_JPEG_DC_TABLES)
+#define EXIFTAGTYPE_JPEG_DC_TABLES EXIF_LONG
+// JPEG_AC_TABLES
+// Use EXIFTAGTYPE_JPEG_AC_TABLES as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_JPEG_AC_TABLES 0x0209
+#define EXIFTAGID_JPEG_AC_TABLES CONSTRUCT_TAGID(JPEG_AC_TABLES, _ID_JPEG_AC_TABLES)
+#define EXIFTAGTYPE_JPEG_AC_TABLES EXIF_LONG
+
+// Color space transformation matrix coefficients
+// Use EXIFTAGTYPE_YCBCR_COEFFICIENTS as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_YCBCR_COEFFICIENTS 0x0211
+#define EXIFTAGID_YCBCR_COEFFICIENTS CONSTRUCT_TAGID(YCBCR_COEFFICIENTS, _ID_YCBCR_COEFFICIENTS)
+#define EXIFTAGTYPE_YCBCR_COEFFICIENTS EXIF_RATIONAL
+// Subsampling ratio of Y to C
+// Use EXIFTAGTYPE_YCBCR_SUB_SAMPLING as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_YCBCR_SUB_SAMPLING 0x0212
+#define EXIFTAGID_YCBCR_SUB_SAMPLING CONSTRUCT_TAGID(YCBCR_SUB_SAMPLING, _ID_YCBCR_SUB_SAMPLING)
+#define EXIFTAGTYPE_YCBCR_SUB_SAMPLING EXIF_SHORT
+// Y and C positioning
+// Use EXIFTAGTYPE_YCBCR_POSITIONING as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_YCBCR_POSITIONING 0x0213
+#define EXIFTAGID_YCBCR_POSITIONING CONSTRUCT_TAGID(YCBCR_POSITIONING, _ID_YCBCR_POSITIONING)
+#define EXIFTAGTYPE_YCBCR_POSITIONING EXIF_SHORT
+// Pair of black and white reference values
+// Use EXIFTAGTYPE_REFERENCE_BLACK_WHITE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_REFERENCE_BLACK_WHITE 0x0214
+#define EXIFTAGID_REFERENCE_BLACK_WHITE CONSTRUCT_TAGID(REFERENCE_BLACK_WHITE, _ID_REFERENCE_BLACK_WHITE)
+#define EXIFTAGTYPE_REFERENCE_BLACK_WHITE EXIF_RATIONAL
+// GAMMA
+// Use EXIFTAGTYPE_GAMMA as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_GAMMA 0x0301
+#define EXIFTAGID_GAMMA CONSTRUCT_TAGID(GAMMA, _ID_GAMMA)
+#define EXIFTAGTYPE_GAMMA EXIF_RATIONAL
+// Null-terminated character string that identifies an ICC profile.
+// Use EXIFTAGTYPE_ICC_PROFILE_DESCRIPTOR as the exif_tag_type (EXIF_ASCII)
+// Count should be 6
+#define _ID_ICC_PROFILE_DESCRIPTOR 0x0302
+#define EXIFTAGID_ICC_PROFILE_DESCRIPTOR CONSTRUCT_TAGID(ICC_PROFILE_DESCRIPTOR, _ID_ICC_PROFILE_DESCRIPTOR)
+#define EXIFTAGTYPE_ICC_PROFILE_DESCRIPTOR EXIF_ASCII
+// SRGB_RENDERING_INTENT
+// Use EXIFTAGTYPE_SRGB_RENDERING_INTENT as the exif_tag_type (EXIF_BYTE)
+// Count should be 6
+#define _ID_SRGB_RENDERING_INTENT 0x0303
+#define EXIFTAGID_SRGB_RENDERING_INTENT CONSTRUCT_TAGID(SRGB_RENDERING_INTENT, _ID_SRGB_RENDERING_INTENT)
+#define EXIFTAGTYPE_SRGB_RENDERING_INTENT EXIF_BYTE
+
+// Null-terminated character string that specifies the title of the image.
+// Use EXIFTAGTYPE_IMAGE_TITLE as the exif_tag_type (EXIF_ASCII )
+//
+#define _ID_IMAGE_TITLE 0x0320
+#define EXIFTAGID_IMAGE_TITLE CONSTRUCT_TAGID(IMAGE_TITLE, _ID_IMAGE_TITLE)
+#define EXIFTAGTYPE_IMAGE_TITLE EXIF_ASCII
+
+// Copyright holder
+// Use EXIFTAGTYPE_COPYRIGHT as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_COPYRIGHT 0x8298
+#define EXIFTAGID_COPYRIGHT CONSTRUCT_TAGID(COPYRIGHT, _ID_COPYRIGHT)
+#define EXIFTAGTYPE_COPYRIGHT EXIF_ASCII
+// New Subfile Type (TIFF tag 0x00fe, NewSubfileType)
+// Use EXIFTAGTYPE_NEW_SUBFILE_TYPE as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_NEW_SUBFILE_TYPE 0x00fe
+#define EXIFTAGID_NEW_SUBFILE_TYPE CONSTRUCT_TAGID(NEW_SUBFILE_TYPE, _ID_NEW_SUBFILE_TYPE)
+#define EXIFTAGTYPE_NEW_SUBFILE_TYPE EXIF_SHORT
+
+// (Old) Subfile Type (TIFF tag 0x00ff, SubfileType)
+// Use EXIFTAGTYPE_SUBFILE_TYPE as the exif_tag_type (EXIF_LONG)
+// Count can be any
+#define _ID_SUBFILE_TYPE 0x00ff
+#define EXIFTAGID_SUBFILE_TYPE CONSTRUCT_TAGID(SUBFILE_TYPE, _ID_SUBFILE_TYPE)
+#define EXIFTAGTYPE_SUBFILE_TYPE EXIF_LONG
+
+// Image width (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_WIDTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_IMAGE_WIDTH 0x0100
+#define EXIFTAGID_TN_IMAGE_WIDTH CONSTRUCT_TAGID(TN_IMAGE_WIDTH, _ID_TN_IMAGE_WIDTH)
+#define EXIFTAGTYPE_TN_IMAGE_WIDTH EXIF_LONG
+// Image height (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_LENGTH as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_IMAGE_LENGTH 0x0101
+#define EXIFTAGID_TN_IMAGE_LENGTH CONSTRUCT_TAGID(TN_IMAGE_LENGTH, _ID_TN_IMAGE_LENGTH)
+#define EXIFTAGTYPE_TN_IMAGE_LENGTH EXIF_LONG
+// Number of bits per component (of thumbnail)
+// Use EXIFTAGTYPE_TN_BITS_PER_SAMPLE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_BITS_PER_SAMPLE 0x0102
+#define EXIFTAGID_TN_BITS_PER_SAMPLE CONSTRUCT_TAGID(TN_BITS_PER_SAMPLE, _ID_TN_BITS_PER_SAMPLE)
+#define EXIFTAGTYPE_TN_BITS_PER_SAMPLE EXIF_SHORT
+// Compression scheme (of thumbnail)
+// Use EXIFTAGTYPE_TN_COMPRESSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_COMPRESSION 0x0103
+#define EXIFTAGID_TN_COMPRESSION CONSTRUCT_TAGID(TN_COMPRESSION, _ID_TN_COMPRESSION)
+#define EXIFTAGTYPE_TN_COMPRESSION EXIF_SHORT
+// Pixel composition (of thumbnail)
+// Use EXIFTAGTYPE_TN_PHOTOMETRIC_INTERPRETATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_PHOTOMETRIC_INTERPRETATION 0x0106
+#define EXIFTAGID_TN_PHOTOMETRIC_INTERPRETATION CONSTRUCT_TAGID(TN_PHOTOMETRIC_INTERPRETATION, _ID_TN_PHOTOMETRIC_INTERPRETATION)
+#define EXIFTAGTYPE_TN_PHOTOMETRIC_INTERPRETATION EXIF_SHORT
+// Image title (of thumbnail)
+// Use EXIFTAGTYPE_TN_IMAGE_DESCRIPTION as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_IMAGE_DESCRIPTION 0x010e
+#define EXIFTAGID_TN_IMAGE_DESCRIPTION CONSTRUCT_TAGID(TN_IMAGE_DESCRIPTION, _ID_TN_IMAGE_DESCRIPTION)
+#define EXIFTAGTYPE_TN_IMAGE_DESCRIPTION EXIF_ASCII
+// Image input equipment manufacturer (of thumbnail)
+// Use EXIFTAGTYPE_TN_MAKE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_MAKE 0x010f
+#define EXIFTAGID_TN_MAKE CONSTRUCT_TAGID(TN_MAKE, _ID_TN_MAKE)
+#define EXIFTAGTYPE_TN_MAKE EXIF_ASCII
+// Image input equipment model (of thumbnail)
+// Use EXIFTAGTYPE_TN_MODEL as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_MODEL 0x0110
+#define EXIFTAGID_TN_MODEL CONSTRUCT_TAGID(TN_MODEL, _ID_TN_MODEL)
+#define EXIFTAGTYPE_TN_MODEL EXIF_ASCII
+// Image data location (of thumbnail)
+// Use EXIFTAGTYPE_TN_STRIP_OFFSETS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage when PlanarConfiguration = 1
+// = SamplesPerPixel * StripsPerImage when PlanarConfiguration = 2
+#define _ID_TN_STRIP_OFFSETS 0x0111
+// NOTE(review): the first CONSTRUCT_TAGID argument is "STRIP_TN_OFFSETS",
+// which breaks the TN_<name> pattern used by every other thumbnail tag in
+// this file — confirm against the tag-id enum whether it should be
+// TN_STRIP_OFFSETS before changing, since the enum is defined elsewhere.
+#define EXIFTAGID_TN_STRIP_OFFSETS CONSTRUCT_TAGID(STRIP_TN_OFFSETS, _ID_TN_STRIP_OFFSETS)
+#define EXIFTAGTYPE_TN_STRIP_OFFSETS EXIF_LONG
+// Orientation of image (of thumbnail)
+// Use EXIFTAGTYPE_TN_ORIENTATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_ORIENTATION 0x0112
+#define EXIFTAGID_TN_ORIENTATION CONSTRUCT_TAGID(TN_ORIENTATION, _ID_TN_ORIENTATION)
+#define EXIFTAGTYPE_TN_ORIENTATION EXIF_SHORT
+// Number of components (of thumbnail)
+// Use EXIFTAGTYPE_TN_SAMPLES_PER_PIXEL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_SAMPLES_PER_PIXEL 0x0115
+#define EXIFTAGID_TN_SAMPLES_PER_PIXEL CONSTRUCT_TAGID(TN_SAMPLES_PER_PIXEL, _ID_TN_SAMPLES_PER_PIXEL)
+#define EXIFTAGTYPE_TN_SAMPLES_PER_PIXEL EXIF_SHORT
+// Number of rows per strip (of thumbnail)
+// Use EXIFTAGTYPE_TN_ROWS_PER_STRIP as the exif_tag_type (EXIF_LONG)
+// Count should be 1
+#define _ID_TN_ROWS_PER_STRIP 0x0116
+#define EXIFTAGID_TN_ROWS_PER_STRIP CONSTRUCT_TAGID(TN_ROWS_PER_STRIP, _ID_TN_ROWS_PER_STRIP)
+#define EXIFTAGTYPE_TN_ROWS_PER_STRIP EXIF_LONG
+// Bytes per compressed strip (of thumbnail)
+// Use EXIFTAGTYPE_TN_STRIP_BYTE_COUNTS as the exif_tag_type (EXIF_LONG)
+// Count = StripsPerImage when PlanarConfiguration = 1
+// = SamplesPerPixel * StripsPerImage when PlanarConfiguration = 2
+#define _ID_TN_STRIP_BYTE_COUNTS 0x0117
+#define EXIFTAGID_TN_STRIP_BYTE_COUNTS CONSTRUCT_TAGID(TN_STRIP_BYTE_COUNTS, _ID_TN_STRIP_BYTE_COUNTS)
+#define EXIFTAGTYPE_TN_STRIP_BYTE_COUNTS EXIF_LONG
+// Image resolution in width direction (of thumbnail)
+// Use EXIFTAGTYPE_TN_X_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_TN_X_RESOLUTION 0x011a
+#define EXIFTAGID_TN_X_RESOLUTION CONSTRUCT_TAGID(TN_X_RESOLUTION, _ID_TN_X_RESOLUTION)
+#define EXIFTAGTYPE_TN_X_RESOLUTION EXIF_RATIONAL
+// Image resolution in height direction (of thumbnail)
+// Use EXIFTAGTYPE_TN_Y_RESOLUTION as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_TN_Y_RESOLUTION 0x011b
+#define EXIFTAGID_TN_Y_RESOLUTION CONSTRUCT_TAGID(TN_Y_RESOLUTION, _ID_TN_Y_RESOLUTION)
+#define EXIFTAGTYPE_TN_Y_RESOLUTION EXIF_RATIONAL
+// Image data arrangement (of thumbnail)
+// Use EXIFTAGTYPE_TN_PLANAR_CONFIGURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_PLANAR_CONFIGURATION 0x011c
+#define EXIFTAGID_TN_PLANAR_CONFIGURATION CONSTRUCT_TAGID(TN_PLANAR_CONFIGURATION, _ID_TN_PLANAR_CONFIGURATION)
+#define EXIFTAGTYPE_TN_PLANAR_CONFIGURATION EXIF_SHORT
+// Unit of X and Y resolution (of thumbnail)
+// Use EXIFTAGTYPE_TN_RESOLUTION_UNIT as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_RESOLUTION_UNIT 0x128
+#define EXIFTAGID_TN_RESOLUTION_UNIT CONSTRUCT_TAGID(TN_RESOLUTION_UNIT, _ID_TN_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_TN_RESOLUTION_UNIT EXIF_SHORT
+// Transfer function (of thumbnail)
+// Use EXIFTAGTYPE_TN_TRANSFER_FUNCTION as the exif_tag_type (EXIF_SHORT)
+// Count should be 3*256
+#define _ID_TN_TRANSFER_FUNCTION 0x012d
+#define EXIFTAGID_TN_TRANSFER_FUNCTION CONSTRUCT_TAGID(TN_TRANSFER_FUNCTION, _ID_TN_TRANSFER_FUNCTION)
+#define EXIFTAGTYPE_TN_TRANSFER_FUNCTION EXIF_SHORT
+// Software used (of thumbnail)
+// Use EXIFTAGTYPE_TN_SOFTWARE as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_SOFTWARE 0x0131
+#define EXIFTAGID_TN_SOFTWARE CONSTRUCT_TAGID(TN_SOFTWARE, _ID_TN_SOFTWARE)
+#define EXIFTAGTYPE_TN_SOFTWARE EXIF_ASCII
+// File change date and time (of thumbnail)
+// Use EXIFTAGTYPE_TN_DATE_TIME as the exif_tag_type (EXIF_ASCII)
+// Count should be 20
+#define _ID_TN_DATE_TIME 0x0132
+#define EXIFTAGID_TN_DATE_TIME CONSTRUCT_TAGID(TN_DATE_TIME, _ID_TN_DATE_TIME)
+#define EXIFTAGTYPE_TN_DATE_TIME EXIF_ASCII
+// ARTIST, person who created this image (of thumbnail)
+// Use EXIFTAGTYPE_TN_ARTIST as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_ARTIST 0x013b
+#define EXIFTAGID_TN_ARTIST CONSTRUCT_TAGID(TN_ARTIST, _ID_TN_ARTIST)
+#define EXIFTAGTYPE_TN_ARTIST EXIF_ASCII
+// White point chromaticity (of thumbnail)
+// Use EXIFTAGTYPE_TN_WHITE_POINT as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 2
+#define _ID_TN_WHITE_POINT 0x013e
+#define EXIFTAGID_TN_WHITE_POINT CONSTRUCT_TAGID(TN_WHITE_POINT, _ID_TN_WHITE_POINT)
+#define EXIFTAGTYPE_TN_WHITE_POINT EXIF_RATIONAL
+// Chromaticities of primaries (of thumbnail)
+// Use EXIFTAGTYPE_TN_PRIMARY_CHROMATICITIES as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_TN_PRIMARY_CHROMATICITIES 0x013f
+#define EXIFTAGID_TN_PRIMARY_CHROMATICITIES CONSTRUCT_TAGID(TN_PRIMARY_CHROMATICITIES, _ID_TN_PRIMARY_CHROMATICITIES)
+#define EXIFTAGTYPE_TN_PRIMARY_CHROMATICITIES EXIF_RATIONAL
+// Offset to JPEG SOI (of thumbnail)
+// Use EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_TN_JPEGINTERCHANGE_FORMAT 0x0201
+#define EXIFTAGID_TN_JPEGINTERCHANGE_FORMAT CONSTRUCT_TAGID(TN_JPEGINTERCHANGE_FORMAT, _ID_TN_JPEGINTERCHANGE_FORMAT)
+#define EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT EXIF_LONG
+// Bytes of JPEG data (of thumbnail)
+// Use EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT_L as the exif_tag_type (EXIF_LONG)
+// Count is undefined
+#define _ID_TN_JPEGINTERCHANGE_FORMAT_L 0x0202
+#define EXIFTAGID_TN_JPEGINTERCHANGE_FORMAT_L CONSTRUCT_TAGID(TN_JPEGINTERCHANGE_FORMAT_L, _ID_TN_JPEGINTERCHANGE_FORMAT_L)
+#define EXIFTAGTYPE_TN_JPEGINTERCHANGE_FORMAT_L EXIF_LONG
+// Color space transformation matrix coefficients (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_COEFFICIENTS as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 3
+#define _ID_TN_YCBCR_COEFFICIENTS 0x0211
+#define EXIFTAGID_TN_YCBCR_COEFFICIENTS CONSTRUCT_TAGID(TN_YCBCR_COEFFICIENTS, _ID_TN_YCBCR_COEFFICIENTS)
+#define EXIFTAGTYPE_TN_YCBCR_COEFFICIENTS EXIF_RATIONAL
+// Subsampling ratio of Y to C (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_SUB_SAMPLING as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_TN_YCBCR_SUB_SAMPLING 0x0212
+#define EXIFTAGID_TN_YCBCR_SUB_SAMPLING CONSTRUCT_TAGID(TN_YCBCR_SUB_SAMPLING, _ID_TN_YCBCR_SUB_SAMPLING)
+#define EXIFTAGTYPE_TN_YCBCR_SUB_SAMPLING EXIF_SHORT
+// Y and C positioning (of thumbnail)
+// Use EXIFTAGTYPE_TN_YCBCR_POSITIONING as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_TN_YCBCR_POSITIONING 0x0213
+#define EXIFTAGID_TN_YCBCR_POSITIONING CONSTRUCT_TAGID(TN_YCBCR_POSITIONING, _ID_TN_YCBCR_POSITIONING)
+#define EXIFTAGTYPE_TN_YCBCR_POSITIONING EXIF_SHORT
+// Pair of black and white reference values (of thumbnail)
+// Use EXIFTAGTYPE_TN_REFERENCE_BLACK_WHITE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 6
+#define _ID_TN_REFERENCE_BLACK_WHITE 0x0214
+#define EXIFTAGID_TN_REFERENCE_BLACK_WHITE CONSTRUCT_TAGID(TN_REFERENCE_BLACK_WHITE, _ID_TN_REFERENCE_BLACK_WHITE)
+#define EXIFTAGTYPE_TN_REFERENCE_BLACK_WHITE EXIF_RATIONAL
+// Copyright holder (of thumbnail)
+// Use EXIFTAGTYPE_TN_COPYRIGHT as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_TN_COPYRIGHT 0x8298
+#define EXIFTAGID_TN_COPYRIGHT CONSTRUCT_TAGID(TN_COPYRIGHT, _ID_TN_COPYRIGHT)
+#define EXIFTAGTYPE_TN_COPYRIGHT EXIF_ASCII
+// Exposure time
+// Use EXIFTAGTYPE_EXPOSURE_TIME as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_TIME 0x829a
+#define EXIFTAGID_EXPOSURE_TIME CONSTRUCT_TAGID(EXPOSURE_TIME, _ID_EXPOSURE_TIME)
+#define EXIFTAGTYPE_EXPOSURE_TIME EXIF_RATIONAL
+// F number
+// Use EXIFTAGTYPE_F_NUMBER as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_F_NUMBER 0x829d
+#define EXIFTAGID_F_NUMBER CONSTRUCT_TAGID(F_NUMBER, _ID_F_NUMBER)
+#define EXIFTAGTYPE_F_NUMBER EXIF_RATIONAL
+// Exif IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_EXIF_IFD_PTR 0x8769
+#define EXIFTAGID_EXIF_IFD_PTR CONSTRUCT_TAGID(EXIF_IFD, _ID_EXIF_IFD_PTR)
+#define EXIFTAGTYPE_EXIF_IFD_PTR EXIF_LONG
+
+// ICC_PROFILE (NOT INTENDED to be accessible to user)
+#define _ID_ICC_PROFILE 0x8773
+#define EXIFTAGID_ICC_PROFILE CONSTRUCT_TAGID(ICC_PROFILE, _ID_ICC_PROFILE)
+#define EXIFTAGTYPE_ICC_PROFILE EXIF_LONG
+// Exposure program
+// Use EXIFTAGTYPE_EXPOSURE_PROGRAM as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXPOSURE_PROGRAM 0x8822
+#define EXIFTAGID_EXPOSURE_PROGRAM CONSTRUCT_TAGID(EXPOSURE_PROGRAM, _ID_EXPOSURE_PROGRAM)
+#define EXIFTAGTYPE_EXPOSURE_PROGRAM EXIF_SHORT
+// Spectral sensitivity
+// Use EXIFTAGTYPE_SPECTRAL_SENSITIVITY as the exif_tag_type (EXIF_ASCII)
+// Count can be any
+#define _ID_SPECTRAL_SENSITIVITY 0x8824
+#define EXIFTAGID_SPECTRAL_SENSITIVITY CONSTRUCT_TAGID(SPECTRAL_SENSITIVITY, _ID_SPECTRAL_SENSITIVITY)
+#define EXIFTAGTYPE_SPECTRAL_SENSITIVITY EXIF_ASCII
+// GPS IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_GPS_IFD_PTR 0x8825
+#define EXIFTAGID_GPS_IFD_PTR CONSTRUCT_TAGID(GPS_IFD, _ID_GPS_IFD_PTR)
+#define EXIFTAGTYPE_GPS_IFD_PTR EXIF_LONG
+// ISO Speed Rating
+// Use EXIFTAGTYPE_ISO_SPEED_RATING as the exif_tag_type (EXIF_SHORT)
+// Count can be any
+#define _ID_ISO_SPEED_RATING 0x8827
+#define EXIFTAGID_ISO_SPEED_RATING CONSTRUCT_TAGID(ISO_SPEED_RATING, _ID_ISO_SPEED_RATING)
+#define EXIFTAGTYPE_ISO_SPEED_RATING EXIF_SHORT
+// Optoelectric conversion factor
+// Use EXIFTAGTYPE_OECF as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_OECF 0x8828
+#define EXIFTAGID_OECF CONSTRUCT_TAGID(OECF, _ID_OECF)
+#define EXIFTAGTYPE_OECF EXIF_UNDEFINED
+// Exif version
+// Use EXIFTAGTYPE_EXIF_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_VERSION 0x9000
+#define EXIFTAGID_EXIF_VERSION CONSTRUCT_TAGID(EXIF_VERSION, _ID_EXIF_VERSION)
+#define EXIFTAGTYPE_EXIF_VERSION EXIF_UNDEFINED
+// Date and time of original data generation
+// Use EXIFTAGTYPE_EXIF_DATE_TIME_ORIGINAL as the exif_tag_type (EXIF_ASCII)
+// It should be 20 characters long including the null-terminating character.
+#define _ID_EXIF_DATE_TIME_ORIGINAL 0x9003
+#define EXIFTAGID_EXIF_DATE_TIME_ORIGINAL CONSTRUCT_TAGID(EXIF_DATE_TIME_ORIGINAL, _ID_EXIF_DATE_TIME_ORIGINAL)
+#define EXIFTAGTYPE_EXIF_DATE_TIME_ORIGINAL EXIF_ASCII
+// Date and time of digital data generation
+// Use EXIFTAGTYPE_EXIF_DATE_TIME_DIGITIZED as the exif_tag_type (EXIF_ASCII)
+// It should be 20 characters long including the null-terminating character.
+#define _ID_EXIF_DATE_TIME_DIGITIZED 0x9004
+#define EXIFTAGID_EXIF_DATE_TIME_DIGITIZED CONSTRUCT_TAGID(EXIF_DATE_TIME_DIGITIZED, _ID_EXIF_DATE_TIME_DIGITIZED)
+#define EXIFTAGTYPE_EXIF_DATE_TIME_DIGITIZED EXIF_ASCII
+// Meaning of each component
+// Use EXIFTAGTYPE_EXIF_COMPONENTS_CONFIG as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_COMPONENTS_CONFIG 0x9101
+#define EXIFTAGID_EXIF_COMPONENTS_CONFIG CONSTRUCT_TAGID(EXIF_COMPONENTS_CONFIG, _ID_EXIF_COMPONENTS_CONFIG)
+#define EXIFTAGTYPE_EXIF_COMPONENTS_CONFIG EXIF_UNDEFINED
+// Meaning of Image compression mode
+// Use EXIFTAGTYPE_EXIF_COMPRESSED_BITS_PER_PIXEL as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXIF_COMPRESSED_BITS_PER_PIXEL 0x9102
+#define EXIFTAGID_EXIF_COMPRESSED_BITS_PER_PIXEL CONSTRUCT_TAGID(EXIF_COMPRESSED_BITS_PER_PIXEL, _ID_EXIF_COMPRESSED_BITS_PER_PIXEL)
+#define EXIFTAGTYPE_EXIF_COMPRESSED_BITS_PER_PIXEL EXIF_RATIONAL
+// Shutter speed
+// Use EXIFTAGTYPE_SHUTTER_SPEED as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_SHUTTER_SPEED 0x9201
+#define EXIFTAGID_SHUTTER_SPEED CONSTRUCT_TAGID(SHUTTER_SPEED, _ID_SHUTTER_SPEED)
+#define EXIFTAGTYPE_SHUTTER_SPEED EXIF_SRATIONAL
+// Aperture
+// Use EXIFTAGTYPE_APERTURE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_APERTURE 0x9202
+#define EXIFTAGID_APERTURE CONSTRUCT_TAGID(APERTURE, _ID_APERTURE)
+#define EXIFTAGTYPE_APERTURE EXIF_RATIONAL
+// Brightness
+// Use EXIFTAGTYPE_BRIGHTNESS as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_BRIGHTNESS 0x9203
+#define EXIFTAGID_BRIGHTNESS CONSTRUCT_TAGID(BRIGHTNESS, _ID_BRIGHTNESS)
+#define EXIFTAGTYPE_BRIGHTNESS EXIF_SRATIONAL
+// Exposure bias
+// Use EXIFTAGTYPE_EXPOSURE_BIAS_VALUE as the exif_tag_type (EXIF_SRATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_BIAS_VALUE 0x9204
+#define EXIFTAGID_EXPOSURE_BIAS_VALUE CONSTRUCT_TAGID(EXPOSURE_BIAS_VALUE, _ID_EXPOSURE_BIAS_VALUE)
+#define EXIFTAGTYPE_EXPOSURE_BIAS_VALUE EXIF_SRATIONAL
+// Maximum lens aperture
+// Use EXIFTAGTYPE_MAX_APERTURE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_MAX_APERTURE 0x9205
+#define EXIFTAGID_MAX_APERTURE CONSTRUCT_TAGID(MAX_APERTURE, _ID_MAX_APERTURE)
+#define EXIFTAGTYPE_MAX_APERTURE EXIF_RATIONAL
+// Subject distance
+// Use EXIFTAGTYPE_SUBJECT_DISTANCE as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_SUBJECT_DISTANCE 0x9206
+#define EXIFTAGID_SUBJECT_DISTANCE CONSTRUCT_TAGID(SUBJECT_DISTANCE, _ID_SUBJECT_DISTANCE)
+#define EXIFTAGTYPE_SUBJECT_DISTANCE EXIF_RATIONAL
+// Metering mode
+// Use EXIFTAGTYPE_METERING_MODE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_METERING_MODE 0x9207
+#define EXIFTAGID_METERING_MODE CONSTRUCT_TAGID(METERING_MODE, _ID_METERING_MODE)
+#define EXIFTAGTYPE_METERING_MODE EXIF_SHORT
+// Light source
+// Use EXIFTAGTYPE_LIGHT_SOURCE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_LIGHT_SOURCE 0x9208
+#define EXIFTAGID_LIGHT_SOURCE CONSTRUCT_TAGID(LIGHT_SOURCE, _ID_LIGHT_SOURCE)
+#define EXIFTAGTYPE_LIGHT_SOURCE EXIF_SHORT
+// Flash
+// Use EXIFTAGTYPE_FLASH as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FLASH 0x9209
+#define EXIFTAGID_FLASH CONSTRUCT_TAGID(FLASH, _ID_FLASH)
+#define EXIFTAGTYPE_FLASH EXIF_SHORT
+// Lens focal length
+// Use EXIFTAGTYPE_FOCAL_LENGTH as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_LENGTH 0x920a
+#define EXIFTAGID_FOCAL_LENGTH CONSTRUCT_TAGID(FOCAL_LENGTH, _ID_FOCAL_LENGTH)
+#define EXIFTAGTYPE_FOCAL_LENGTH EXIF_RATIONAL
+// Subject area
+// Use EXIFTAGTYPE_SUBJECT_AREA as exif_tag_type (EXIF_SHORT)
+// Count should be 2 or 3 or 4
+#define _ID_SUBJECT_AREA 0x9214
+#define EXIFTAGID_SUBJECT_AREA CONSTRUCT_TAGID(SUBJECT_AREA, _ID_SUBJECT_AREA)
+#define EXIFTAGTYPE_SUBJECT_AREA EXIF_SHORT
+// Maker note
+// Use EXIFTAGTYPE_EXIF_MAKER_NOTE as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_EXIF_MAKER_NOTE 0x927c
+#define EXIFTAGID_EXIF_MAKER_NOTE CONSTRUCT_TAGID(EXIF_MAKER_NOTE, _ID_EXIF_MAKER_NOTE)
+#define EXIFTAGTYPE_EXIF_MAKER_NOTE EXIF_UNDEFINED
+// User comments
+// Use EXIFTAGTYPE_EXIF_USER_COMMENT as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_EXIF_USER_COMMENT 0x9286
+#define EXIFTAGID_EXIF_USER_COMMENT CONSTRUCT_TAGID(EXIF_USER_COMMENT, _ID_EXIF_USER_COMMENT)
+#define EXIFTAGTYPE_EXIF_USER_COMMENT EXIF_UNDEFINED
+// Date time sub-seconds
+// Use EXIFTAGTYPE_SUBSEC_TIME as the exif_tag_type (EXIF_ASCII); NOTE(review): the macro below is actually spelled EXIFTAGTYPE_SEBSEC_TIME — confirm no callers rely on the typo before renaming.
+// Count could be any
+#define _ID_SUBSEC_TIME 0x9290
+#define EXIFTAGID_SUBSEC_TIME CONSTRUCT_TAGID(SUBSEC_TIME, _ID_SUBSEC_TIME)
+#define EXIFTAGTYPE_SEBSEC_TIME EXIF_ASCII
+// Date time original sub-seconds
+// use EXIFTAGTYPE_SUBSEC_TIME_ORIGINAL as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME_ORIGINAL 0x9291
+#define EXIFTAGID_SUBSEC_TIME_ORIGINAL CONSTRUCT_TAGID(SUBSEC_TIME_ORIGINAL, _ID_SUBSEC_TIME_ORIGINAL)
+#define EXIFTAGTYPE_SUBSEC_TIME_ORIGINAL EXIF_ASCII
+// Date time digitized sub-seconds
+// use EXIFTAGTYPE_SUBSEC_TIME_DIGITIZED as the exif_tag_type (EXIF_ASCII)
+// Count could be any
+#define _ID_SUBSEC_TIME_DIGITIZED 0x9292
+#define EXIFTAGID_SUBSEC_TIME_DIGITIZED CONSTRUCT_TAGID(SUBSEC_TIME_DIGITIZED, _ID_SUBSEC_TIME_DIGITIZED)
+#define EXIFTAGTYPE_SUBSEC_TIME_DIGITIZED EXIF_ASCII
+// Supported Flashpix version
+// Use EXIFTAGTYPE_EXIF_FLASHPIX_VERSION as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 4
+#define _ID_EXIF_FLASHPIX_VERSION 0xa000
+#define EXIFTAGID_EXIF_FLASHPIX_VERSION CONSTRUCT_TAGID(EXIF_FLASHPIX_VERSION, _ID_EXIF_FLASHPIX_VERSION)
+#define EXIFTAGTYPE_EXIF_FLASHPIX_VERSION EXIF_UNDEFINED
+// Color space information
+// Use EXIFTAGTYPE_EXIF_COLOR_SPACE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_COLOR_SPACE 0xa001
+#define EXIFTAGID_EXIF_COLOR_SPACE CONSTRUCT_TAGID(EXIF_COLOR_SPACE, _ID_EXIF_COLOR_SPACE)
+#define EXIFTAGTYPE_EXIF_COLOR_SPACE EXIF_SHORT
+// Valid image width
+// Use EXIFTAGTYPE_EXIF_PIXEL_X_DIMENSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_PIXEL_X_DIMENSION 0xa002
+#define EXIFTAGID_EXIF_PIXEL_X_DIMENSION CONSTRUCT_TAGID(EXIF_PIXEL_X_DIMENSION, _ID_EXIF_PIXEL_X_DIMENSION)
+#define EXIFTAGTYPE_EXIF_PIXEL_X_DIMENSION EXIF_SHORT
+// Valid image height
+// Use EXIFTAGTYPE_EXIF_PIXEL_Y_DIMENSION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXIF_PIXEL_Y_DIMENSION 0xa003
+#define EXIFTAGID_EXIF_PIXEL_Y_DIMENSION CONSTRUCT_TAGID(EXIF_PIXEL_Y_DIMENSION, _ID_EXIF_PIXEL_Y_DIMENSION)
+#define EXIFTAGTYPE_EXIF_PIXEL_Y_DIMENSION EXIF_SHORT
+// Related audio file
+// Use EXIFTAGTYPE_RELATED_SOUND_FILE as the exif_tag_type (EXIF_ASCII)
+// Count should be 13
+#define _ID_RELATED_SOUND_FILE 0xa004
+#define EXIFTAGID_RELATED_SOUND_FILE CONSTRUCT_TAGID(RELATED_SOUND_FILE, _ID_RELATED_SOUND_FILE)
+#define EXIFTAGTYPE_RELATED_SOUND_FILE EXIF_ASCII
+// Interop IFD pointer (NOT INTENDED to be accessible to user)
+#define _ID_INTEROP_IFD_PTR 0xa005
+#define EXIFTAGID_INTEROP_IFD_PTR CONSTRUCT_TAGID(INTEROP, _ID_INTEROP_IFD_PTR)
+#define EXIFTAGTYPE_INTEROP_IFD_PTR EXIF_LONG
+// Flash energy
+// Use EXIFTAGTYPE_FLASH_ENERGY as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FLASH_ENERGY 0xa20b
+#define EXIFTAGID_FLASH_ENERGY CONSTRUCT_TAGID(FLASH_ENERGY, _ID_FLASH_ENERGY)
+#define EXIFTAGTYPE_FLASH_ENERGY EXIF_RATIONAL
+// Spatial frequency response
+// Use EXIFTAGTYPE_SPATIAL_FREQ_RESPONSE as exif_tag_type (EXIF_UNDEFINED)
+// Count would be any
+#define _ID_SPATIAL_FREQ_RESPONSE 0xa20c
+#define EXIFTAGID_SPATIAL_FREQ_RESPONSE CONSTRUCT_TAGID(SPATIAL_FREQ_RESPONSE, _ID_SPATIAL_FREQ_RESPONSE)
+#define EXIFTAGTYPE_SPATIAL_FREQ_RESPONSE EXIF_UNDEFINED
+// Focal plane x resolution
+// Use EXIFTAGTYPE_FOCAL_PLANE_X_RESOLUTION as exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_PLANE_X_RESOLUTION 0xa20e
+#define EXIFTAGID_FOCAL_PLANE_X_RESOLUTION CONSTRUCT_TAGID(FOCAL_PLANE_X_RESOLUTION, _ID_FOCAL_PLANE_X_RESOLUTION)
+#define EXIFTAGTYPE_FOCAL_PLANE_X_RESOLUTION EXIF_RATIONAL
+// Focal plane y resolution
+// Use EXIFTAGTYPE_FOCAL_PLANE_Y_RESOLUTION as exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_FOCAL_PLANE_Y_RESOLUTION 0xa20f
+#define EXIFTAGID_FOCAL_PLANE_Y_RESOLUTION CONSTRUCT_TAGID(FOCAL_PLANE_Y_RESOLUTION, _ID_FOCAL_PLANE_Y_RESOLUTION)
+#define EXIFTAGTYPE_FOCAL_PLANE_Y_RESOLUTION EXIF_RATIONAL
+// Focal plane resolution unit
+// Use EXIFTAGTYPE_FOCAL_PLANE_RESOLUTION_UNIT as exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FOCAL_PLANE_RESOLUTION_UNIT 0xa210
+#define EXIFTAGID_FOCAL_PLANE_RESOLUTION_UNIT CONSTRUCT_TAGID(FOCAL_PLANE_RESOLUTION_UNIT, _ID_FOCAL_PLANE_RESOLUTION_UNIT)
+#define EXIFTAGTYPE_FOCAL_PLANE_RESOLUTION_UNIT EXIF_SHORT
+// Subject location
+// Use EXIFTAGTYPE_SUBJECT_LOCATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 2
+#define _ID_SUBJECT_LOCATION 0xa214
+#define EXIFTAGID_SUBJECT_LOCATION CONSTRUCT_TAGID(SUBJECT_LOCATION, _ID_SUBJECT_LOCATION)
+#define EXIFTAGTYPE_SUBJECT_LOCATION EXIF_SHORT
+// Exposure index
+// Use EXIFTAGTYPE_EXPOSURE_INDEX as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_EXPOSURE_INDEX 0xa215
+#define EXIFTAGID_EXPOSURE_INDEX CONSTRUCT_TAGID(EXPOSURE_INDEX, _ID_EXPOSURE_INDEX)
+#define EXIFTAGTYPE_EXPOSURE_INDEX EXIF_RATIONAL
+// Sensing method
+// Use EXIFTAGTYPE_SENSING_METHOD as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SENSING_METHOD 0xa217
+#define EXIFTAGID_SENSING_METHOD CONSTRUCT_TAGID(SENSING_METHOD, _ID_SENSING_METHOD)
+#define EXIFTAGTYPE_SENSING_METHOD EXIF_SHORT
+// File source
+// Use EXIFTAGTYPE_FILE_SOURCE as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 1
+#define _ID_FILE_SOURCE 0xa300
+#define EXIFTAGID_FILE_SOURCE CONSTRUCT_TAGID(FILE_SOURCE, _ID_FILE_SOURCE)
+#define EXIFTAGTYPE_FILE_SOURCE EXIF_UNDEFINED
+// Scene type
+// Use EXIFTAGTYPE_SCENE_TYPE as the exif_tag_type (EXIF_UNDEFINED)
+// Count should be 1
+#define _ID_SCENE_TYPE 0xa301
+#define EXIFTAGID_SCENE_TYPE CONSTRUCT_TAGID(SCENE_TYPE, _ID_SCENE_TYPE)
+#define EXIFTAGTYPE_SCENE_TYPE EXIF_UNDEFINED
+// CFA pattern
+// Use EXIFTAGTYPE_CFA_PATTERN as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_CFA_PATTERN 0xa302
+#define EXIFTAGID_CFA_PATTERN CONSTRUCT_TAGID(CFA_PATTERN, _ID_CFA_PATTERN)
+#define EXIFTAGTYPE_CFA_PATTERN EXIF_UNDEFINED
+// Custom image processing
+// Use EXIFTAGTYPE_CUSTOM_RENDERED as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CUSTOM_RENDERED 0xa401
+#define EXIFTAGID_CUSTOM_RENDERED CONSTRUCT_TAGID(CUSTOM_RENDERED, _ID_CUSTOM_RENDERED)
+#define EXIFTAGTYPE_CUSTOM_RENDERED EXIF_SHORT
+// Exposure mode
+// Use EXIFTAGTYPE_EXPOSURE_MODE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_EXPOSURE_MODE 0xa402
+#define EXIFTAGID_EXPOSURE_MODE CONSTRUCT_TAGID(EXPOSURE_MODE, _ID_EXPOSURE_MODE)
+#define EXIFTAGTYPE_EXPOSURE_MODE EXIF_SHORT
+// White balance
+// Use EXIFTAGTYPE_WHITE_BALANCE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_WHITE_BALANCE 0xa403
+#define EXIFTAGID_WHITE_BALANCE CONSTRUCT_TAGID(WHITE_BALANCE, _ID_WHITE_BALANCE)
+#define EXIFTAGTYPE_WHITE_BALANCE EXIF_SHORT
+// Digital zoom ratio
+// Use EXIFTAGTYPE_DIGITAL_ZOOM_RATIO as the exif_tag_type (EXIF_RATIONAL)
+// Count should be 1
+#define _ID_DIGITAL_ZOOM_RATIO 0xa404
+#define EXIFTAGID_DIGITAL_ZOOM_RATIO CONSTRUCT_TAGID(DIGITAL_ZOOM_RATIO, _ID_DIGITAL_ZOOM_RATIO)
+#define EXIFTAGTYPE_DIGITAL_ZOOM_RATIO EXIF_RATIONAL
+// Focal length in 35mm film
+// Use EXIFTAGTYPE_FOCAL_LENGTH_35MM as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_FOCAL_LENGTH_35MM 0xa405
+#define EXIFTAGID_FOCAL_LENGTH_35MM CONSTRUCT_TAGID(FOCAL_LENGTH_35MM, _ID_FOCAL_LENGTH_35MM)
+#define EXIFTAGTYPE_FOCAL_LENGTH_35MM EXIF_SHORT
+// Scene capture type
+// Use EXIFTAGTYPE_SCENE_CAPTURE_TYPE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SCENE_CAPTURE_TYPE 0xa406
+#define EXIFTAGID_SCENE_CAPTURE_TYPE CONSTRUCT_TAGID(SCENE_CAPTURE_TYPE, _ID_SCENE_CAPTURE_TYPE)
+#define EXIFTAGTYPE_SCENE_CAPTURE_TYPE EXIF_SHORT
+// Gain control
+// Use EXIFTAGTYPE_GAIN_CONTROL as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_GAIN_CONTROL 0xa407
+#define EXIFTAGID_GAIN_CONTROL CONSTRUCT_TAGID(GAIN_CONTROL, _ID_GAIN_CONTROL)
+#define EXIFTAGTYPE_GAIN_CONTROL EXIF_SHORT
+// Contrast
+// Use EXIFTAGTYPE_CONTRAST as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_CONTRAST 0xa408
+#define EXIFTAGID_CONTRAST CONSTRUCT_TAGID(CONTRAST, _ID_CONTRAST)
+#define EXIFTAGTYPE_CONTRAST EXIF_SHORT
+// Saturation
+// Use EXIFTAGTYPE_SATURATION as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SATURATION 0xa409
+#define EXIFTAGID_SATURATION CONSTRUCT_TAGID(SATURATION, _ID_SATURATION)
+#define EXIFTAGTYPE_SATURATION EXIF_SHORT
+// Sharpness
+// Use EXIFTAGTYPE_SHARPNESS as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SHARPNESS 0xa40a
+#define EXIFTAGID_SHARPNESS CONSTRUCT_TAGID(SHARPNESS, _ID_SHARPNESS)
+#define EXIFTAGTYPE_SHARPNESS EXIF_SHORT
+// Device settings description
+// Use the exif_tag_type macro below (EXIF_UNDEFINED); NOTE(review): it is misspelled EXIFTAGTYPE_DEVIC_SETTIGNS_DESCRIPTION — confirm no callers rely on the typo before renaming.
+// Count could be any
+#define _ID_DEVICE_SETTINGS_DESCRIPTION 0xa40b
+#define EXIFTAGID_DEVICE_SETTINGS_DESCRIPTION CONSTRUCT_TAGID(DEVICE_SETTINGS_DESCRIPTION, _ID_DEVICE_SETTINGS_DESCRIPTION)
+#define EXIFTAGTYPE_DEVIC_SETTIGNS_DESCRIPTION EXIF_UNDEFINED
+// Subject distance range
+// Use EXIFTAGTYPE_SUBJECT_DISTANCE_RANGE as the exif_tag_type (EXIF_SHORT)
+// Count should be 1
+#define _ID_SUBJECT_DISTANCE_RANGE 0xa40c
+#define EXIFTAGID_SUBJECT_DISTANCE_RANGE CONSTRUCT_TAGID(SUBJECT_DISTANCE_RANGE, _ID_SUBJECT_DISTANCE_RANGE)
+#define EXIFTAGTYPE_SUBJECT_DISTANCE_RANGE EXIF_SHORT
+// Unique image id
+// Use EXIFTAGTYPE_IMAGE_UID as the exif_tag_type (EXIF_ASCII)
+// Count should be 33
+#define _ID_IMAGE_UID 0xa420
+#define EXIFTAGID_IMAGE_UID CONSTRUCT_TAGID(IMAGE_UID, _ID_IMAGE_UID)
+#define EXIFTAGTYPE_IMAGE_UID EXIF_ASCII
+// PIM tag
+// Use EXIFTAGTYPE_PIM_TAG as the exif_tag_type (EXIF_UNDEFINED)
+// Count can be any
+#define _ID_PIM 0xc4a5
+#define EXIFTAGID_PIM_TAG CONSTRUCT_TAGID(PIM, _ID_PIM)
+#define EXIFTAGTYPE_PIM_TAG EXIF_UNDEFINED
+
+#endif /* __QCAMERA_INTF_H__ */
diff --git a/camera/QualcommCamera.cpp b/camera/QualcommCamera.cpp
new file mode 100644
index 0000000..dfdc4c4
--- /dev/null
+++ b/camera/QualcommCamera.cpp
@@ -0,0 +1,702 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*#error uncomment this for compiler test!*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QualcommCamera"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+/* include QCamera Hardware Interface Header*/
+#include "QualcommCamera.h"
+#include "QualcommCameraHardware.h"
+//#include <camera/CameraHardwareInterface.h>
+
+extern "C" {
+#include <sys/time.h>
+}
+
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+
+static hw_module_methods_t camera_module_methods = {
+ open: camera_device_open,
+};
+
+
+static hw_module_t camera_common = {
+ tag: HARDWARE_MODULE_TAG,
+ version_major: 0,
+ version_minor: 01,
+ id: CAMERA_HARDWARE_MODULE_ID,
+ name: "Qcamera",
+ author:"Qcom",
+ methods: &camera_module_methods,
+ dso: NULL,
+ //reserved[0]: 0,
+};
+
+camera_module_t HAL_MODULE_INFO_SYM = {
+ common: camera_common,
+ get_number_of_cameras: get_number_of_cameras,
+ get_camera_info: get_camera_info,
+};
+
+camera_device_ops_t camera_ops = {
+ set_preview_window: android::set_preview_window,
+ set_callbacks: android::set_callbacks,
+ enable_msg_type: android::enable_msg_type,
+ disable_msg_type: android::disable_msg_type,
+ msg_type_enabled: android::msg_type_enabled,
+
+ start_preview: android::start_preview,
+ stop_preview: android::stop_preview,
+ preview_enabled: android::preview_enabled,
+ store_meta_data_in_buffers: android::store_meta_data_in_buffers,
+
+ start_recording: android::start_recording,
+ stop_recording: android::stop_recording,
+ recording_enabled: android::recording_enabled,
+ release_recording_frame: android::release_recording_frame,
+
+ auto_focus: android::auto_focus,
+ cancel_auto_focus: android::cancel_auto_focus,
+
+ take_picture: android::take_picture,
+ cancel_picture: android::cancel_picture,
+
+ set_parameters: android::set_parameters,
+ get_parameters: android::get_parameters,
+ put_parameters: android::put_parameters,
+ send_command: android::send_command,
+
+ release: android::release,
+ dump: android::dump,
+};
+
+namespace android {
+
+typedef struct {
+ QualcommCameraHardware *hardware;
+ int camera_released;
+ QCameraParameters parameters;
+ #if 1
+ camera_notify_callback notify_cb;
+ camera_data_callback data_cb;
+ camera_data_timestamp_callback data_cb_timestamp;
+ camera_request_memory get_memory;
+ void *user_data;
+ #endif
+} camera_hardware_t;
+
+typedef struct {
+ camera_memory_t mem;
+ int32_t msgType;
+ sp<IMemory> dataPtr;
+ void* user;
+ unsigned int index;
+} q_cam_memory_t;
+
+
+static void camera_release_memory(struct camera_memory *mem)
+{
+}
+
+void cam_notify_callback(int32_t msgType,
+ int32_t ext1,
+ int32_t ext2,
+ void* user)
+{
+ ALOGV("Q%s: E", __func__);
+ camera_device * device = (camera_device *)user;
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal) {
+ camera_notify_callback notify_cb = camHal->notify_cb;
+ void *user_data = camHal->user_data;
+ if(notify_cb) {
+ notify_cb(msgType, ext1, ext2, user_data);
+ }
+ }
+ }
+}
+
+camera_memory_t* get_mem(int fd,size_t buf_size,
+ unsigned int num_bufs,
+ void *user)
+{
+ ALOGV("Q%s: E", __func__);
+ camera_device * device = (camera_device *)user;
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal) {
+ camera_request_memory getmem_cb = camHal->get_memory;
+ void *user_data = camHal->user_data;
+ if(getmem_cb) {
+ return getmem_cb(fd, buf_size, num_bufs, user_data);
+ }
+ }
+ }
+ return NULL;
+}
+#if 0
+void native_send_data_callback(int32_t msgType,
+ camera_memory_t * framebuffer,
+ void* user)
+{
+ ALOGE("Q%s: E", __func__);
+ static unsigned int counter = 0;
+#if 0
+ camera_device * device = (camera_device *)user;
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal) {
+ camera_data_callback data_cb = camHal->data_cb;
+ void *user_data = camHal->user_data;
+ if(data_cb) {
+ q_cam_memory_t *qmem = (q_cam_memory_t *)malloc(sizeof(q_cam_memory_t));
+ if (qmem) {
+ qmem->dataPtr = dataPtr;
+ qmem->mem.data = (void *)((int)dataPtr->pointer() + dataPtr->offset());
+ qmem->mem.handle = NULL; //(void *)dataPtr->getHeapID();
+ qmem->mem.size = dataPtr->size( );
+ qmem->mem.release = camera_release_memory;
+ qmem->msgType = msgType;
+ qmem->index = counter;
+#endif
+ data_cb(msgType, framebuffer, counter, NULL, user);
+ counter++;
+#if 0
+ } else {
+ ALOGE("%s: out of memory", __func__);
+ }
+#endif
+// }
+// }
+// }
+}
+#endif
+
+static void cam_data_callback(int32_t msgType,
+ const sp<IMemory>& dataPtr,
+ void* user)
+{
+ ALOGV("Q%s: E", __func__);
+ static unsigned int counter = 0;
+ camera_device * device = (camera_device *)user;
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal) {
+ camera_data_callback data_cb = camHal->data_cb;
+ void *user_data = camHal->user_data;
+ if(data_cb) {
+ q_cam_memory_t *qmem = (q_cam_memory_t *)malloc(sizeof(q_cam_memory_t));
+ if (qmem) {
+ qmem->dataPtr = dataPtr;
+ qmem->mem.data = (void *)((int)dataPtr->pointer() + dataPtr->offset());
+ qmem->mem.handle = NULL; //(void *)dataPtr->getHeapID();
+ qmem->mem.size = dataPtr->size( );
+ qmem->mem.release = camera_release_memory;
+ qmem->msgType = msgType;
+ qmem->index = counter;
+ counter++;
+ data_cb(msgType, (camera_memory_t *)qmem, counter, NULL, user_data);
+ } else {
+ ALOGE("%s: out of memory", __func__);
+ }
+ }
+ }
+ }
+}
+
+static void cam_data_callback_timestamp(nsecs_t timestamp,
+ int32_t msgType,
+ const sp<IMemory>& dataPtr,
+ void* user)
+{
+ ALOGV("Q%s: E", __func__);
+
+ static unsigned int counter = 0;
+ camera_device * device = (camera_device *)user;
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal) {
+ camera_data_timestamp_callback data_cb_timestamp = camHal->data_cb_timestamp;
+ void *user_data = camHal->user_data;
+ if(data_cb_timestamp) {
+ q_cam_memory_t *qmem = (q_cam_memory_t *)malloc(sizeof(q_cam_memory_t));
+ if (qmem) {
+ qmem->dataPtr = dataPtr;
+ qmem->mem.data = (void *)((int)dataPtr->pointer() + dataPtr->offset());
+ qmem->mem.handle = NULL; //(void *)dataPtr->getHeapID();
+ qmem->mem.size = dataPtr->size( );
+ qmem->mem.release = camera_release_memory;
+ qmem->msgType = msgType;
+ qmem->index = counter;
+ counter++;
+ data_cb_timestamp(timestamp, msgType, (camera_memory_t *)qmem, counter, user_data);
+ } else {
+ ALOGE("%s: out of memory", __func__);
+ }
+ }
+ }
+ }
+}
+
+QualcommCameraHardware * util_get_Hal_obj( struct camera_device * device)
+{
+ QualcommCameraHardware* hardware = NULL;
+ if(device && device->priv){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ hardware = camHal->hardware;
+ }
+ return hardware;
+}
+void close_Hal_obj( struct camera_device * device)
+{
+ ALOGV("%s: E", __func__);
+ QualcommCameraHardware* hardware = NULL;
+ if(device && device->priv){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ ALOGI("%s: clear hw", __func__);
+ hardware = camHal->hardware;
+ delete hardware;
+ }
+ ALOGV("%s: X", __func__);
+}
+
+
+QCameraParameters* util_get_HAL_parameter( struct camera_device * device)
+{
+ QCameraParameters *param = NULL;
+ if(device && device->priv){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ param = &(camHal->parameters);
+ }
+ return param;
+}
+
+
+extern "C" int get_number_of_cameras()
+{
+ /* try to query every time we get the call!*/
+
+ ALOGV("Q%s: E", __func__);
+ return android::HAL_getNumberOfCameras( );
+}
+
+extern "C" int get_camera_info(int camera_id, struct camera_info *info)
+{
+ int rc = -1;
+ ALOGV("Q%s: E", __func__);
+ if(info) {
+ struct CameraInfo camInfo;
+ memset(&camInfo, -1, sizeof (struct CameraInfo));
+ HAL_getCameraInfo(camera_id, &camInfo);
+ if (camInfo.facing >= 0) {
+ rc = 0;
+ info->facing = camInfo.facing;
+ info->orientation = camInfo.orientation;
+ }
+ }
+ ALOGV("Q%s: X", __func__);
+ return rc;
+}
+
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" int camera_device_open(
+ const struct hw_module_t* module, const char* id,
+ struct hw_device_t** hw_device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ camera_device *device = NULL;
+ if(module && id && hw_device) {
+ int cameraId = atoi(id);
+
+ if (!strcmp(module->name, camera_common.name)) {
+ device =
+ (camera_device *)malloc(sizeof (struct camera_device));
+ if(device) {
+ camera_hardware_t *camHal =
+ (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
+ if(camHal) {
+ memset(camHal, 0, sizeof (camera_hardware_t));
+ camHal->hardware = HAL_openCameraHardware(cameraId);
+ if (camHal->hardware != NULL) {
+ /*To Do: populate camHal*/
+ device->common.close = close_camera_device;
+ device->ops = &camera_ops;
+ device->priv = (void *)camHal;
+ rc = 0;
+ } else {
+ free(camHal);
+ free (device);
+ device = NULL;
+ }
+ } else {
+ free (device);
+ device = NULL;
+ }
+ }
+ }
+ }
+ *hw_device = (hw_device_t*)device;
+ return rc;
+}
+
+extern "C" int close_camera_device( hw_device_t *hw_dev)
+{
+ ALOGV("Q%s: device =%p E", __func__, hw_dev);
+ int rc = -1;
+ camera_device_t *device = (camera_device_t *)hw_dev;
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal ) {
+ //if(!camHal->camera_released) {
+ QualcommCameraHardware* hardware = util_get_Hal_obj( device);
+ if(hardware != NULL) {
+ if(camHal->camera_released != true)
+ hardware->release( );
+ //hardware.clear( );
+
+ }
+ //}
+ close_Hal_obj(device);
+ free(device->priv);
+ device->priv = NULL;
+ }
+ free(device);
+ rc = 0;
+ }
+ return rc;
+}
+
+
+int set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ ALOGV("Q%s: E window = %p", __func__, window);
+ int rc = -1;
+ QualcommCameraHardware *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL) {
+ rc = hardware->set_PreviewWindow((void *)window);
+ }
+ return rc;
+}
+
+void set_callbacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ ALOGV("Q%s: E", __func__);
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal) {
+ camera_notify_callback cam_nt_cb;
+ camera_data_callback cam_dt_cb;
+ camera_data_timestamp_callback cam_dt_timestamp_cb;
+
+ camHal->notify_cb = notify_cb;
+ camHal->data_cb = data_cb;
+ camHal->data_cb_timestamp = data_cb_timestamp;
+ camHal->user_data = user;
+ camHal->get_memory = get_memory;
+ #if 0
+ if(notify_cb) {
+ cam_nt_cb = cam_notify_callback;
+ } else {
+ cam_nt_cb = NULL;
+ }
+
+ if(data_cb) {
+ cam_dt_cb = cam_data_callback;
+ } else {
+ cam_dt_cb = NULL;
+ }
+
+ if(data_cb_timestamp) {
+ cam_dt_timestamp_cb = cam_data_callback_timestamp;
+ } else {
+ cam_dt_timestamp_cb = NULL;
+ }
+ #endif
+ ALOGV("cam_nt_cb =%p,cam_dt_cb=%p,cam_dt_timestamp_cb=%p", cam_nt_cb, cam_dt_cb, cam_dt_timestamp_cb);
+ hardware->setCallbacks(notify_cb,data_cb,data_cb_timestamp,get_memory, user);
+ }
+ }
+}
+
+void enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->enableMsgType(msg_type);
+ }
+}
+
+void disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ ALOGV("Q%s: E", __func__);
+ if(hardware != NULL){
+ hardware->disableMsgType(msg_type);
+ }
+}
+
+int msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->msgTypeEnabled(msg_type);
+ }
+ return rc;
+}
+
+int start_preview(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->startPreview( );
+ }
+ ALOGV("Q%s: X", __func__);
+ return rc;
+}
+
+void stop_preview(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->stopPreview( );
+ }
+}
+
+int preview_enabled(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware* hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->previewEnabled( );
+ }
+ return rc;
+}
+
+int store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->storeMetaDataInBuffers( enable);
+ }
+ return rc;
+}
+
+int start_recording(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->startRecording( );
+ }
+ return rc;
+}
+
+void stop_recording(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ QualcommCameraHardware* hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->stopRecording( );
+ }
+}
+
+int recording_enabled(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->recordingEnabled( );
+ }
+ return rc;
+}
+
+void release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ ALOGV("Q%s: E", __func__);
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->releaseRecordingFrame( opaque);
+ }
+}
+
+int auto_focus(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->autoFocus( );
+ }
+ return rc;
+}
+
+int cancel_auto_focus(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->cancelAutoFocus( );
+ }
+ return rc;
+}
+
+int take_picture(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->takePicture( );
+ }
+ return rc;
+}
+
+int cancel_picture(struct camera_device * device)
+
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->cancelPicture( );
+ }
+ return rc;
+}
+
+QCameraParameters g_param;
+String8 g_str;
+int set_parameters(struct camera_device * device, const char *parms)
+
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL && parms){
+ // = util_get_HAL_parameter(device);
+ g_str = String8(parms);
+
+ g_param.unflatten(g_str);
+ rc = hardware->setParameters( g_param );
+ }
+ return rc;
+}
+
+char* get_parameters(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ char* rc = NULL;
+
+ QCameraParameters param;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ g_param = hardware->getParameters( );
+ g_str = g_param.flatten( );
+ rc = (char *)g_str.string( );
+ if (!rc) {
+ ALOGE("get_parameters: NULL string");
+ } else {
+ //ALOGE("get_parameters: %s", rc);
+ }
+ }
+ ALOGV("get_parameters X");
+ return rc;
+}
+
+void put_parameters(struct camera_device * device, char *parm)
+
+{
+ ALOGV("Q%s: E", __func__);
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ if(hardware != NULL){
+ //rc = hardware->putParameters(parm );
+ }
+ }
+ ALOGV("put_parameters X");
+}
+
+int send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->sendCommand( cmd, arg1, arg2);
+ }
+ return rc;
+}
+
+void release(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ hardware->release( );
+ camHal->camera_released = true;
+ }
+}
+
+int dump(struct camera_device * device, int fd)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QualcommCameraHardware * hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ //rc = hardware->dump( fd );
+ rc = 0;
+ }
+ return rc;
+}
+
+}; // namespace android
diff --git a/camera/QualcommCamera.h b/camera/QualcommCamera.h
new file mode 100644
index 0000000..f37fef5
--- /dev/null
+++ b/camera/QualcommCamera.h
@@ -0,0 +1,95 @@
+/*
+** Copyright (c) 2011 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+
+
+//#include <camera/CameraHardwareInterface.h>
+
+extern "C" {
+#include <hardware/camera.h>
+
+ int get_number_of_cameras();
+ int get_camera_info(int camera_id, struct camera_info *info);
+
+ int camera_device_open(const struct hw_module_t* module, const char* id,
+ struct hw_device_t** device);
+
+ hw_device_t * open_camera_device(int cameraId);
+
+ int close_camera_device( hw_device_t *);
+
+namespace android {
+ int set_preview_window(struct camera_device *,
+ struct preview_stream_ops *window);
+ void set_callbacks(struct camera_device *,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ void enable_msg_type(struct camera_device *, int32_t msg_type);
+
+ void disable_msg_type(struct camera_device *, int32_t msg_type);
+ int msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+ int start_preview(struct camera_device *);
+
+ void stop_preview(struct camera_device *);
+
+ int preview_enabled(struct camera_device *);
+ int store_meta_data_in_buffers(struct camera_device *, int enable);
+
+ int start_recording(struct camera_device *);
+
+ void stop_recording(struct camera_device *);
+
+ int recording_enabled(struct camera_device *);
+
+ void release_recording_frame(struct camera_device *,
+ const void *opaque);
+
+ int auto_focus(struct camera_device *);
+
+ int cancel_auto_focus(struct camera_device *);
+
+ int take_picture(struct camera_device *);
+
+ int cancel_picture(struct camera_device *);
+
+ int set_parameters(struct camera_device *, const char *parms);
+
+ char* get_parameters(struct camera_device *);
+
+ void put_parameters(struct camera_device *, char *);
+
+ int send_command(struct camera_device *,
+ int32_t cmd, int32_t arg1, int32_t arg2);
+
+ void release(struct camera_device *);
+
+ int dump(struct camera_device *, int fd);
+
+
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
+
diff --git a/camera/QualcommCamera2.cpp b/camera/QualcommCamera2.cpp
new file mode 100755
index 0000000..c495adb
--- /dev/null
+++ b/camera/QualcommCamera2.cpp
@@ -0,0 +1,524 @@
+/* Copyright (c) 2011, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+/*#error uncomment this for compiler test!*/
+
+//#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QualcommCamera"
+#include <utils/Log.h>
+#include <utils/threads.h>
+#include <fcntl.h>
+#include <sys/mman.h>
+
+#include "QCameraHAL.h"
+/* include QCamera Hardware Interface Header*/
+#include "QualcommCamera2.h"
+//#include "QualcommCameraHardware.h"
+//#include <camera/CameraHardwareInterface.h>
+
+extern "C" {
+#include <sys/time.h>
+}
+
+/* HAL function implementation goes here*/
+
+/**
+ * The functions need to be provided by the camera HAL.
+ *
+ * If getNumberOfCameras() returns N, the valid cameraId for getCameraInfo()
+ * and openCameraHardware() is 0 to N-1.
+ */
+
+static hw_module_methods_t camera_module_methods = {
+ open: camera_device_open,
+};
+
+static hw_module_t camera_common = {
+ tag: HARDWARE_MODULE_TAG,
+ version_major: 0,
+ version_minor: 01,
+ id: CAMERA_HARDWARE_MODULE_ID,
+ name: "Qcamera",
+ author:"Qcom",
+ methods: &camera_module_methods,
+ dso: NULL,
+ //reserved[0]: 0,
+};
+camera_module_t HAL_MODULE_INFO_SYM = {
+ common: camera_common,
+ get_number_of_cameras: get_number_of_cameras,
+ get_camera_info: get_camera_info,
+};
+
+camera_device_ops_t camera_ops = {
+ set_preview_window: android::set_preview_window,
+ set_callbacks: android::set_CallBacks,
+ enable_msg_type: android::enable_msg_type,
+ disable_msg_type: android::disable_msg_type,
+ msg_type_enabled: android::msg_type_enabled,
+
+ start_preview: android::start_preview,
+ stop_preview: android::stop_preview,
+ preview_enabled: android::preview_enabled,
+ store_meta_data_in_buffers: android::store_meta_data_in_buffers,
+
+ start_recording: android::start_recording,
+ stop_recording: android::stop_recording,
+ recording_enabled: android::recording_enabled,
+ release_recording_frame: android::release_recording_frame,
+
+ auto_focus: android::auto_focus,
+ cancel_auto_focus: android::cancel_auto_focus,
+
+ take_picture: android::take_picture,
+ cancel_picture: android::cancel_picture,
+
+ set_parameters: android::set_parameters,
+ get_parameters: android::get_parameters,
+ put_parameters: android::put_parameters,
+ send_command: android::send_command,
+
+ release: android::release,
+ dump: android::dump,
+};
+
+namespace android {
+
+typedef struct {
+ camera_device hw_dev;
+ //sp<CameraHardwareInterface> hardware;
+ QCameraHardwareInterface *hardware;
+ int camera_released;
+ int cameraId;
+ //QCameraParameters parameters;
+} camera_hardware_t;
+
+typedef struct {
+ camera_memory_t mem;
+ int32_t msgType;
+ sp<IMemory> dataPtr;
+ void* user;
+ unsigned int index;
+} q_cam_memory_t;
+
+QCameraHardwareInterface *util_get_Hal_obj( struct camera_device * device)
+{
+ QCameraHardwareInterface *hardware = NULL;
+ if(device && device->priv){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ hardware = camHal->hardware;
+ }
+ return hardware;
+}
+
+#if 0
+QCameraParameters* util_get_HAL_parameter( struct camera_device * device)
+{
+ QCameraParameters *param = NULL;
+ if(device && device->priv){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ param = &(camHal->parameters);
+ }
+ return param;
+}
+#endif
+extern "C" int get_number_of_cameras()
+{
+ /* try to query every time we get the call!*/
+
+ ALOGV("Q%s: E", __func__);
+ return android::HAL_getNumberOfCameras( );
+}
+
+extern "C" int get_camera_info(int camera_id, struct camera_info *info)
+{
+ int rc = -1;
+ ALOGV("Q%s: E", __func__);
+ if(info) {
+ struct CameraInfo camInfo;
+ memset(&camInfo, -1, sizeof (struct CameraInfo));
+ android::HAL_getCameraInfo(camera_id, &camInfo);
+ if (camInfo.facing >= 0) {
+ rc = 0;
+ info->facing = camInfo.facing;
+ info->orientation = camInfo.orientation;
+ }
+ }
+ ALOGV("Q%s: X", __func__);
+ return rc;
+}
+
+static pthread_mutex_t camera_session_lock = PTHREAD_MUTEX_INITIALIZER;
+static unsigned int QCameraSession = 0;
+
+/* HAL should return NULL if it fails to open camera hardware. */
+extern "C" int camera_device_open(
+ const struct hw_module_t* module, const char* id,
+ struct hw_device_t** hw_device)
+{
+ int rc = -1;
+ int mode = 0; // TODO: need to add 3d/2d mode, etc
+ camera_device *device = NULL;
+
+ pthread_mutex_lock(&camera_session_lock);
+
+ if(QCameraSession) {
+ ALOGE("%s Mutliple camera open instances are not supported",__func__);
+ pthread_mutex_unlock(&camera_session_lock);
+ return NULL;
+ }
+ if(module && id && hw_device) {
+ int cameraId = atoi(id);
+
+ if (!strcmp(module->name, camera_common.name)) {
+ camera_hardware_t *camHal =
+ (camera_hardware_t *) malloc(sizeof (camera_hardware_t));
+ if(!camHal) {
+ *hw_device = NULL;
+ ALOGE("%s: end in no mem", __func__);
+ pthread_mutex_unlock(&camera_session_lock);
+ return rc;
+ }
+ /* we have the camera_hardware obj malloced */
+ memset(camHal, 0, sizeof (camera_hardware_t));
+ camHal->hardware = new QCameraHardwareInterface(cameraId, mode); //HAL_openCameraHardware(cameraId);
+ if (camHal->hardware && camHal->hardware->isCameraReady()) {
+ camHal->cameraId = cameraId;
+ device = &camHal->hw_dev;
+ device->common.close = close_camera_device;
+ device->ops = &camera_ops;
+ device->priv = (void *)camHal;
+ QCameraSession++;
+ rc = 0;
+ } else {
+ if (camHal->hardware) {
+ delete camHal->hardware;
+ camHal->hardware = NULL;
+ }
+ free(camHal);
+ device = NULL;
+ }
+ }
+ }
+ /* pass the actual hw_device ptr to the framework, so the memberof() macro can actually be used */
+ *hw_device = (hw_device_t*)&device->common;
+ ALOGV("%s: end rc %d", __func__, rc);
+ pthread_mutex_unlock(&camera_session_lock);
+ return rc;
+}
+
+extern "C" int close_camera_device( hw_device_t *hw_dev)
+{
+ ALOGV("Q%s: device =%p E", __func__, hw_dev);
+ int rc = -1;
+ camera_device_t *device = (camera_device_t *)hw_dev;
+
+ pthread_mutex_lock(&camera_session_lock);
+
+ if(device) {
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ if(camHal ) {
+ QCameraHardwareInterface *hardware = util_get_Hal_obj( device);
+ if(!camHal->camera_released) {
+ if(hardware != NULL) {
+ hardware->release( );
+ }
+ }
+ if (QCameraSession)
+ QCameraSession--;
+ if(hardware != NULL)
+ delete hardware;
+ free(camHal);
+ }
+ rc = 0;
+ }
+
+ pthread_mutex_unlock(&camera_session_lock);
+ return rc;
+}
+
+
+int set_preview_window(struct camera_device * device,
+ struct preview_stream_ops *window)
+{
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+
+ if(hardware != NULL) {
+ rc = hardware->setPreviewWindow(window);
+ }
+ return rc;
+}
+
+void set_CallBacks(struct camera_device * device,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user)
+{
+ ALOGV("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->setCallbacks(notify_cb,data_cb, data_cb_timestamp, get_memory, user);
+ }
+}
+
+void enable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->enableMsgType(msg_type);
+ }
+}
+
+void disable_msg_type(struct camera_device * device, int32_t msg_type)
+{
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ ALOGV("Q%s: E", __func__);
+ if(hardware != NULL){
+ hardware->disableMsgType(msg_type);
+ }
+}
+
+int msg_type_enabled(struct camera_device * device, int32_t msg_type)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->msgTypeEnabled(msg_type);
+ }
+ return rc;
+}
+
+int start_preview(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->startPreview( );
+ }
+ ALOGV("Q%s: X", __func__);
+ return rc;
+}
+
+void stop_preview(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->stopPreview( );
+ }
+}
+
+int preview_enabled(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->previewEnabled( );
+ }
+ return rc;
+}
+
+int store_meta_data_in_buffers(struct camera_device * device, int enable)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->storeMetaDataInBuffers(enable);
+ }
+ return rc;
+}
+
+int start_recording(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->startRecording( );
+ }
+ return rc;
+}
+
+void stop_recording(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->stopRecording( );
+ }
+}
+
+int recording_enabled(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->recordingEnabled( );
+ }
+ return rc;
+}
+
+void release_recording_frame(struct camera_device * device,
+ const void *opaque)
+{
+ ALOGV("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->releaseRecordingFrame(opaque);
+ }
+}
+
+int auto_focus(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->autoFocus( );
+ }
+ return rc;
+}
+
+int cancel_auto_focus(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->cancelAutoFocus( );
+ }
+ return rc;
+}
+
+int take_picture(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->takePicture( );
+ }
+ return rc;
+}
+
+int cancel_picture(struct camera_device * device)
+
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->cancelPicture( );
+ }
+ return rc;
+}
+
+int set_parameters(struct camera_device * device, const char *parms)
+
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL && parms){
+ //QCameraParameters param;// = util_get_HAL_parameter(device);
+ //String8 str = String8(parms);
+
+ //param.unflatten(str);
+ rc = hardware->setParameters(parms);
+ //rc = 0;
+ }
+ return rc;
+}
+
+char* get_parameters(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ char *parms = NULL;
+ hardware->getParameters(&parms);
+ return parms;
+ }
+ return NULL;
+}
+
+void put_parameters(struct camera_device * device, char *parm)
+
+{
+ ALOGV("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ hardware->putParameters(parm);
+ }
+}
+
+int send_command(struct camera_device * device,
+ int32_t cmd, int32_t arg1, int32_t arg2)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->sendCommand( cmd, arg1, arg2);
+ }
+ return rc;
+}
+
+void release(struct camera_device * device)
+{
+ ALOGV("Q%s: E", __func__);
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ camera_hardware_t *camHal = (camera_hardware_t *)device->priv;
+ hardware->release( );
+ camHal->camera_released = true;
+ }
+}
+
+int dump(struct camera_device * device, int fd)
+{
+ ALOGV("Q%s: E", __func__);
+ int rc = -1;
+ QCameraHardwareInterface *hardware = util_get_Hal_obj(device);
+ if(hardware != NULL){
+ rc = hardware->dump( fd );
+ //rc = 0;
+ }
+ return rc;
+}
+
+}; // namespace android
diff --git a/camera/QualcommCamera2.h b/camera/QualcommCamera2.h
new file mode 100644
index 0000000..8142689
--- /dev/null
+++ b/camera/QualcommCamera2.h
@@ -0,0 +1,108 @@
+/* Copyright (c) 2011, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_H
+
+
+#include "QCameraHWI.h"
+
+extern "C" {
+/*#include <hardware/camera.h>*/
+
+ int get_number_of_cameras();
+ int get_camera_info(int camera_id, struct camera_info *info);
+
+ int camera_device_open(const struct hw_module_t* module, const char* id,
+ struct hw_device_t** device);
+
+ hw_device_t * open_camera_device(int cameraId);
+
+ int close_camera_device( hw_device_t *);
+
+namespace android {
+ int set_preview_window(struct camera_device *,
+ struct preview_stream_ops *window);
+ void set_CallBacks(struct camera_device *,
+ camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ void enable_msg_type(struct camera_device *, int32_t msg_type);
+
+ void disable_msg_type(struct camera_device *, int32_t msg_type);
+ int msg_type_enabled(struct camera_device *, int32_t msg_type);
+
+ int start_preview(struct camera_device *);
+
+ void stop_preview(struct camera_device *);
+
+ int preview_enabled(struct camera_device *);
+ int store_meta_data_in_buffers(struct camera_device *, int enable);
+
+ int start_recording(struct camera_device *);
+
+ void stop_recording(struct camera_device *);
+
+ int recording_enabled(struct camera_device *);
+
+ void release_recording_frame(struct camera_device *,
+ const void *opaque);
+
+ int auto_focus(struct camera_device *);
+
+ int cancel_auto_focus(struct camera_device *);
+
+ int take_picture(struct camera_device *);
+
+ int cancel_picture(struct camera_device *);
+
+ int set_parameters(struct camera_device *, const char *parms);
+
+ char* get_parameters(struct camera_device *);
+
+ void put_parameters(struct camera_device *, char *);
+
+ int send_command(struct camera_device *,
+ int32_t cmd, int32_t arg1, int32_t arg2);
+
+ void release(struct camera_device *);
+
+ int dump(struct camera_device *, int fd);
+
+
+
+}; // namespace android
+
+} //extern "C"
+
+#endif
+
diff --git a/camera/QualcommCameraHardware.cpp b/camera/QualcommCameraHardware.cpp
new file mode 100644
index 0000000..915ded3
--- /dev/null
+++ b/camera/QualcommCameraHardware.cpp
@@ -0,0 +1,10027 @@
+
+/*
+** Copyright 2008, Google Inc.
+** Copyright (c) 2011-2012 The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#define ALOG_NDEBUG 0
+#define ALOG_NIDEBUG 0
+#define LOG_TAG "QualcommCameraHardware"
+#include <utils/Log.h>
+#include "QualcommCameraHardware.h"
+
+#include <utils/Errors.h>
+#include <utils/threads.h>
+
+#include <binder/MemoryHeapPmem.h>
+#if 0
+#include <binder/MemoryHeapIon.h>
+#endif
+#include <camera/Camera.h>
+#include <hardware/camera.h>
+#include <utils/String16.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <unistd.h>
+#include <fcntl.h>
+#include <cutils/properties.h>
+#include <math.h>
+#if HAVE_ANDROID_OS
+#include <linux/android_pmem.h>
+#endif
+#include <linux/ioctl.h>
+#include "QCameraParameters.h"
+#include <media/mediarecorder.h>
+#include <gralloc_priv.h>
+#include <genlock.h>
+
+#include "linux/msm_mdp.h"
+#include <linux/fb.h>
+#define LIKELY(exp) __builtin_expect(!!(exp), 1)
+#define UNLIKELY(exp) __builtin_expect(!!(exp), 0)
+#define CAMERA_HAL_UNUSED(expr) do { (void)(expr); } while (0)
+
+extern "C" {
+#include <fcntl.h>
+#include <time.h>
+#include <pthread.h>
+#include <stdio.h>
+#include <string.h>
+#include <unistd.h>
+#include <termios.h>
+#include <assert.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <signal.h>
+#include <errno.h>
+#include <sys/mman.h>
+#include <sys/system_properties.h>
+#include <sys/time.h>
+#include <stdlib.h>
+
+
+#include <camera.h>
+#include <cam_fifo.h>
+#include <liveshot.h>
+#include <jpege.h>
+#include <jpeg_encoder.h>
+
+#define DUMP_LIVESHOT_JPEG_FILE 0
+
+#define DEFAULT_PICTURE_WIDTH 640
+#define DEFAULT_PICTURE_HEIGHT 480
+#define DEFAULT_PICTURE_WIDTH_3D 1920
+#define DEFAULT_PICTURE_HEIGHT_3D 1080
+#define INITIAL_PREVIEW_HEIGHT 144
+#define INITIAL_PREVIEW_WIDTH 176
+
+#define THUMBNAIL_BUFFER_SIZE (THUMBNAIL_WIDTH * THUMBNAIL_HEIGHT * 3/2)
+#define MAX_ZOOM_LEVEL 5
+#define NOT_FOUND -1
+// Number of video buffers held by kernal (initially 1,2 &3)
+#define ACTIVE_VIDEO_BUFFERS 3
+#define ACTIVE_PREVIEW_BUFFERS 3
+#define ACTIVE_ZSL_BUFFERS 3
+#define APP_ORIENTATION 90
+#define HDR_HAL_FRAME 2
+
+#define FLASH_AUTO 24
+#define FLASH_SNAP 32
+
+#define DUMMY_CAMERA_STARTED 1;
+#define DUMMY_CAMERA_STOPPED 0;
+#define FLOOR16(X) ((X) & 0xFFF0)
+#if DLOPEN_LIBMMCAMERA
+#include <dlfcn.h>
+
+
+// Conversion routines from YV420sp to YV12 format
+int (*LINK_yuv_convert_ycrcb420sp_to_yv12_inplace) (yuv_image_type* yuvStructPtr);
+int (*LINK_yuv_convert_ycrcb420sp_to_yv12) (yuv_image_type* yuvStructPtrin, yuv_image_type* yuvStructPtrout);
+#define NUM_YV12_FRAMES 1
+#define FOCUS_AREA_INIT "(-1000,-1000,1000,1000,1000)"
+
+void *libmmcamera;
+void* (*LINK_cam_conf)(void *data);
+void* (*LINK_cam_frame)(void *data);
+void* (*LINK_wait_cam_frame_thread_ready)(void);
+void* (*LINK_cam_frame_set_exit_flag)(int flag);
+bool (*LINK_jpeg_encoder_init)();
+void (*LINK_jpeg_encoder_join)();
+bool (*LINK_jpeg_encoder_encode)(const cam_ctrl_dimension_t *dimen,
+ const uint8_t *thumbnailbuf, int thumbnailfd,
+ const uint8_t *snapshotbuf, int snapshotfd,
+ common_crop_t *scaling_parms, exif_tags_info_t *exif_data,
+ int exif_table_numEntries, int jpegPadding, const int32_t cbcroffset,int zsl_enable);
+void (*LINK_camframe_terminate)(void);
+//for 720p
+// Function pointer , called by camframe when a video frame is available.
+void (**LINK_camframe_video_callback)(struct msm_frame * frame);
+// Function to add a frame to free Q
+void (*LINK_camframe_add_frame)(cam_frame_type_t type,struct msm_frame *frame);
+
+void (*LINK_camframe_release_all_frames)(cam_frame_type_t type);
+
+int8_t (*LINK_jpeg_encoder_setMainImageQuality)(uint32_t quality);
+int8_t (*LINK_jpeg_encoder_setThumbnailQuality)(uint32_t quality);
+int8_t (*LINK_jpeg_encoder_setRotation)(uint32_t rotation);
+int8_t (*LINK_jpeg_encoder_get_buffer_offset)(uint32_t width, uint32_t height,
+ uint32_t* p_y_offset,
+ uint32_t* p_cbcr_offset,
+ uint32_t* p_buf_size);
+int8_t (*LINK_jpeg_encoder_setLocation)(const camera_position_type *location);
+void (*LINK_jpeg_encoder_set_3D_info)(cam_3d_frame_format_t format);
+const struct camera_size_type *(*LINK_default_sensor_get_snapshot_sizes)(int *len);
+int (*LINK_launch_cam_conf_thread)(void);
+int (*LINK_release_cam_conf_thread)(void);
+mm_camera_status_t (*LINK_mm_camera_init)(mm_camera_config *, mm_camera_notify*, mm_camera_ops*,uint8_t);
+mm_camera_status_t (*LINK_mm_camera_deinit)();
+mm_camera_status_t (*LINK_mm_camera_destroy)();
+mm_camera_status_t (*LINK_mm_camera_exec)();
+mm_camera_status_t (*LINK_mm_camera_get_camera_info) (qcamera_info_t* p_cam_info, int* p_num_cameras);
+
+int8_t (*LINK_zoom_crop_upscale)(uint32_t width, uint32_t height,
+ uint32_t cropped_width, uint32_t cropped_height, uint8_t *img_buf);
+
+// callbacks
+void (**LINK_mmcamera_shutter_callback)(common_crop_t *crop);
+void (**LINK_cancel_liveshot)(void);
+int8_t (*LINK_set_liveshot_params)(uint32_t a_width, uint32_t a_height, exif_tags_info_t *a_exif_data,
+ int a_exif_numEntries, uint8_t* a_out_buffer, uint32_t a_outbuffer_size);
+void (*LINK_set_liveshot_frame)(struct msm_frame *liveshot_frame);
+#else
+#define LINK_cam_conf cam_conf
+#define LINK_cam_frame cam_frame
+#define LINK_wait_cam_frame_thread_ready wait_cam_frame_thread_ready
+#define LINK_cam_frame cam_frame_set_exit_flag
+#define LINK_jpeg_encoder_init jpeg_encoder_init
+#define LINK_jpeg_encoder_join jpeg_encoder_join
+#define LINK_jpeg_encoder_encode jpeg_encoder_encode
+#define LINK_camframe_terminate camframe_terminate
+#define LINK_jpeg_encoder_setMainImageQuality jpeg_encoder_setMainImageQuality
+#define LINK_jpeg_encoder_setThumbnailQuality jpeg_encoder_setThumbnailQuality
+#define LINK_jpeg_encoder_setRotation jpeg_encoder_setRotation
+#define LINK_jpeg_encoder_get_buffer_offset jpeg_encoder_get_buffer_offset
+#define LINK_jpeg_encoder_setLocation jpeg_encoder_setLocation
+#define LINK_jpeg_encoder_set_3D_info jpeg_encoder_set_3D_info
+#define LINK_default_sensor_get_snapshot_sizes default_sensor_get_snapshot_sizes
+#define LINK_launch_cam_conf_thread launch_cam_conf_thread
+#define LINK_release_cam_conf_thread release_cam_conf_thread
+#define LINK_zoom_crop_upscale zoom_crop_upscale
+#define LINK_mm_camera_init mm_camera_config_init
+#define LINK_mm_camera_deinit mm_camera_config_deinit
+#define LINK_mm_camera_destroy mm_camera_config_destroy
+#define LINK_mm_camera_exec mm_camera_exec
+#define LINK_camframe_add_frame camframe_add_frame
+#define LINK_camframe_release_all_frames camframe_release_all_frames
+#define LINK_mm_camera_get_camera_info mm_camera_get_camera_info
+
+extern void (*mmcamera_camframe_callback)(struct msm_frame *frame);
+extern void (*mmcamera_camstats_callback)(camstats_type stype, camera_preview_histogram_info* histinfo);
+extern void (*mmcamera_jpegfragment_callback)(uint8_t *buff_ptr,
+ uint32_t buff_size);
+extern void (*mmcamera_jpeg_callback)(jpeg_event_t status);
+extern void (*mmcamera_shutter_callback)(common_crop_t *crop);
+extern void (*mmcamera_liveshot_callback)(liveshot_status status, uint32_t jpeg_size);
+#define LINK_set_liveshot_params set_liveshot_params
+#define LINK_set_liveshot_frame set_liveshot_frame
+#endif
+
+} // extern "C"
+
+#ifndef HAVE_CAMERA_SIZE_TYPE
+struct camera_size_type {
+ int width;
+ int height;
+};
+#endif
+#if 0
+typedef struct crop_info_struct {
+ int32_t x;
+ int32_t y;
+ int32_t w;
+ int32_t h;
+} zoom_crop_info;
+#endif
+union zoomimage
+{
+ char d[sizeof(struct mdp_blit_req_list) + sizeof(struct mdp_blit_req) * 1];
+ struct mdp_blit_req_list list;
+} zoomImage;
+
+//Default to VGA
+#define DEFAULT_PREVIEW_WIDTH 640
+#define DEFAULT_PREVIEW_HEIGHT 480
+#define DEFAULT_PREVIEW_WIDTH_3D 1280
+#define DEFAULT_PREVIEW_HEIGHT_3D 720
+
+//Default FPS
+#define MINIMUM_FPS 5
+#define MAXIMUM_FPS 31
+#define DEFAULT_FPS MAXIMUM_FPS
+#define DEFAULT_FIXED_FPS_VALUE 30
+/*
+ * Modifying preview size requires modification
+ * in bitmasks for boardproperties
+ */
+static uint32_t PREVIEW_SIZE_COUNT;
+static uint32_t HFR_SIZE_COUNT;
+
+board_property boardProperties[] = {
+ {TARGET_MSM7625, 0x00000fff, false, false, false},
+ {TARGET_MSM7625A, 0x00000fff, false, false, false},
+ {TARGET_MSM7627, 0x000006ff, false, false, false},
+ {TARGET_MSM7627A, 0x000006ff, false, false, false},
+ {TARGET_MSM7630, 0x00000fff, true, true, false},
+ {TARGET_MSM8660, 0x00001fff, true, true, false},
+ {TARGET_QSD8250, 0x00000fff, false, false, false}
+};
+
+//static const camera_size_type* picture_sizes;
+//static int PICTURE_SIZE_COUNT;
+/* TODO
+ * Ideally this should be a populated by lower layers.
+ * But currently this is no API to do that at lower layer.
+ * Hence populating with default sizes for now. This needs
+ * to be changed once the API is supported.
+ */
+//sorted on column basis
+static struct camera_size_type zsl_picture_sizes[] = {
+ { 1024, 768}, // 1MP XGA
+ { 800, 600}, //SVGA
+ { 800, 480}, // WVGA
+ { 640, 480}, // VGA
+ { 352, 288}, //CIF
+ { 320, 240}, // QVGA
+ { 176, 144} // QCIF
+};
+
+static struct camera_size_type for_3D_picture_sizes[] = {
+ { 1920, 1080},
+};
+
+static int data_counter = 0;
+static int sensor_rotation = 0;
+static int record_flag = 0;
+static camera_size_type* picture_sizes;
+static camera_size_type* preview_sizes;
+static camera_size_type* hfr_sizes;
+static unsigned int PICTURE_SIZE_COUNT;
+static const camera_size_type * picture_sizes_ptr;
+static int supportedPictureSizesCount;
+static liveshotState liveshot_state = LIVESHOT_DONE;
+
+#ifdef Q12
+#undef Q12
+#endif
+
+#define Q12 4096
+
+static const target_map targetList [] = {
+ { "msm7625", TARGET_MSM7625 },
+ { "msm7625a", TARGET_MSM7625A },
+ { "msm7627", TARGET_MSM7627 },
+ { "msm7627a", TARGET_MSM7627A },
+ { "qsd8250", TARGET_QSD8250 },
+ { "msm7630", TARGET_MSM7630 },
+ { "msm8660", TARGET_MSM8660 }
+
+};
+static targetType mCurrentTarget = TARGET_MAX;
+
+typedef struct {
+ uint32_t aspect_ratio;
+ uint32_t width;
+ uint32_t height;
+} thumbnail_size_type;
+
+static thumbnail_size_type thumbnail_sizes[] = {
+ { 7281, 512, 288 }, //1.777778
+ { 6826, 480, 288 }, //1.666667
+ { 6808, 256, 154 }, //1.662337
+ { 6144, 432, 288 }, //1.5
+ { 5461, 512, 384 }, //1.333333
+ { 5006, 352, 288 }, //1.222222
+};
+#define THUMBNAIL_SIZE_COUNT (sizeof(thumbnail_sizes)/sizeof(thumbnail_size_type))
+#define DEFAULT_THUMBNAIL_SETTING 4
+#define THUMBNAIL_WIDTH_STR "512"
+#define THUMBNAIL_HEIGHT_STR "384"
+#define THUMBNAIL_SMALL_HEIGHT 144
+static camera_size_type jpeg_thumbnail_sizes[] = {
+ { 512, 288 },
+ { 480, 288 },
+ { 432, 288 },
+ { 512, 384 },
+ { 352, 288 },
+ {0,0}
+};
+//supported preview fps ranges should be added to this array in the form (minFps,maxFps)
+static android::FPSRange FpsRangesSupported[] = {{MINIMUM_FPS*1000,MAXIMUM_FPS*1000}};
+
+#define FPS_RANGES_SUPPORTED_COUNT (sizeof(FpsRangesSupported)/sizeof(FpsRangesSupported[0]))
+
+#define JPEG_THUMBNAIL_SIZE_COUNT (sizeof(jpeg_thumbnail_sizes)/sizeof(camera_size_type))
+static int attr_lookup(const str_map arr[], int len, const char *name)
+{
+ if (name) {
+ for (int i = 0; i < len; i++) {
+ if (!strcmp(arr[i].desc, name))
+ return arr[i].val;
+ }
+ }
+ return NOT_FOUND;
+}
+
+// round to the next power of two
+static inline unsigned clp2(unsigned x)
+{
+ x = x - 1;
+ x = x | (x >> 1);
+ x = x | (x >> 2);
+ x = x | (x >> 4);
+ x = x | (x >> 8);
+ x = x | (x >>16);
+ return x + 1;
+}
+
+static int exif_table_numEntries = 0;
+#define MAX_EXIF_TABLE_ENTRIES 14
+exif_tags_info_t exif_data[MAX_EXIF_TABLE_ENTRIES];
+//static zoom_crop_info zoomCropInfo;
+static android_native_rect_t zoomCropInfo;
+static void *mLastQueuedFrame = NULL;
+#define RECORD_BUFFERS 9
+#define RECORD_BUFFERS_8x50 8
+static int kRecordBufferCount;
+/* controls whether VPE is avialable for the target
+ * under consideration.
+ * 1: VPE support is available
+ * 0: VPE support is not available (default)
+ */
+static bool mVpeEnabled;
+static cam_frame_start_parms camframeParams;
+
+static int HAL_numOfCameras;
+static qcamera_info_t HAL_cameraInfo[MSM_MAX_CAMERA_SENSORS];
+static int HAL_currentCameraId;
+static int HAL_currentCameraMode;
+static mm_camera_config mCfgControl;
+static bool mCameraOpen;
+
+static int HAL_currentSnapshotMode;
+static int previewWidthToNativeZoom;
+static int previewHeightToNativeZoom;
+#define CAMERA_SNAPSHOT_NONZSL 0x04
+#define CAMERA_SNAPSHOT_ZSL 0x08
+
+namespace android {
+ extern void native_send_data_callback(int32_t msgType,
+ camera_memory_t * framebuffer,
+ void* user);
+
+ extern camera_memory_t* get_mem(int fd,size_t buf_size,
+ unsigned int num_bufs,
+ void *user);
+
+static const int PICTURE_FORMAT_JPEG = 1;
+static const int PICTURE_FORMAT_RAW = 2;
+
+// from aeecamera.h
+static const str_map whitebalance[] = {
+ { QCameraParameters::WHITE_BALANCE_AUTO, CAMERA_WB_AUTO },
+ { QCameraParameters::WHITE_BALANCE_INCANDESCENT, CAMERA_WB_INCANDESCENT },
+ { QCameraParameters::WHITE_BALANCE_FLUORESCENT, CAMERA_WB_FLUORESCENT },
+ { QCameraParameters::WHITE_BALANCE_DAYLIGHT, CAMERA_WB_DAYLIGHT },
+ { QCameraParameters::WHITE_BALANCE_CLOUDY_DAYLIGHT, CAMERA_WB_CLOUDY_DAYLIGHT }
+};
+
+// from camera_effect_t. This list must match aeecamera.h
+static const str_map effects[] = {
+ { QCameraParameters::EFFECT_NONE, CAMERA_EFFECT_OFF },
+ { QCameraParameters::EFFECT_MONO, CAMERA_EFFECT_MONO },
+ { QCameraParameters::EFFECT_NEGATIVE, CAMERA_EFFECT_NEGATIVE },
+ { QCameraParameters::EFFECT_SOLARIZE, CAMERA_EFFECT_SOLARIZE },
+ { QCameraParameters::EFFECT_SEPIA, CAMERA_EFFECT_SEPIA },
+ { QCameraParameters::EFFECT_POSTERIZE, CAMERA_EFFECT_POSTERIZE },
+ { QCameraParameters::EFFECT_WHITEBOARD, CAMERA_EFFECT_WHITEBOARD },
+ { QCameraParameters::EFFECT_BLACKBOARD, CAMERA_EFFECT_BLACKBOARD },
+ { QCameraParameters::EFFECT_AQUA, CAMERA_EFFECT_AQUA }
+};
+
+// from qcamera/common/camera.h
+static const str_map autoexposure[] = {
+ { QCameraParameters::AUTO_EXPOSURE_FRAME_AVG, CAMERA_AEC_FRAME_AVERAGE },
+ { QCameraParameters::AUTO_EXPOSURE_CENTER_WEIGHTED, CAMERA_AEC_CENTER_WEIGHTED },
+ { QCameraParameters::AUTO_EXPOSURE_SPOT_METERING, CAMERA_AEC_SPOT_METERING }
+};
+
+// from qcamera/common/camera.h
+static const str_map antibanding[] = {
+ { QCameraParameters::ANTIBANDING_OFF, CAMERA_ANTIBANDING_OFF },
+ { QCameraParameters::ANTIBANDING_50HZ, CAMERA_ANTIBANDING_50HZ },
+ { QCameraParameters::ANTIBANDING_60HZ, CAMERA_ANTIBANDING_60HZ },
+ { QCameraParameters::ANTIBANDING_AUTO, CAMERA_ANTIBANDING_AUTO }
+};
+
+static const str_map antibanding_3D[] = {
+ { QCameraParameters::ANTIBANDING_OFF, CAMERA_ANTIBANDING_OFF },
+ { QCameraParameters::ANTIBANDING_50HZ, CAMERA_ANTIBANDING_50HZ },
+ { QCameraParameters::ANTIBANDING_60HZ, CAMERA_ANTIBANDING_60HZ }
+};
+
+/* Mapping from MCC to antibanding type */
+struct country_map {
+ uint32_t country_code;
+ camera_antibanding_type type;
+};
+
+#if 0 //not using this function. keeping this as this came from Google.
+static struct country_map country_numeric[] = {
+ { 202, CAMERA_ANTIBANDING_50HZ }, // Greece
+ { 204, CAMERA_ANTIBANDING_50HZ }, // Netherlands
+ { 206, CAMERA_ANTIBANDING_50HZ }, // Belgium
+ { 208, CAMERA_ANTIBANDING_50HZ }, // France
+ { 212, CAMERA_ANTIBANDING_50HZ }, // Monaco
+ { 213, CAMERA_ANTIBANDING_50HZ }, // Andorra
+ { 214, CAMERA_ANTIBANDING_50HZ }, // Spain
+ { 216, CAMERA_ANTIBANDING_50HZ }, // Hungary
+ { 219, CAMERA_ANTIBANDING_50HZ }, // Croatia
+ { 220, CAMERA_ANTIBANDING_50HZ }, // Serbia
+ { 222, CAMERA_ANTIBANDING_50HZ }, // Italy
+ { 226, CAMERA_ANTIBANDING_50HZ }, // Romania
+ { 228, CAMERA_ANTIBANDING_50HZ }, // Switzerland
+ { 230, CAMERA_ANTIBANDING_50HZ }, // Czech Republic
+ { 231, CAMERA_ANTIBANDING_50HZ }, // Slovakia
+ { 232, CAMERA_ANTIBANDING_50HZ }, // Austria
+ { 234, CAMERA_ANTIBANDING_50HZ }, // United Kingdom
+ { 235, CAMERA_ANTIBANDING_50HZ }, // United Kingdom
+ { 238, CAMERA_ANTIBANDING_50HZ }, // Denmark
+ { 240, CAMERA_ANTIBANDING_50HZ }, // Sweden
+ { 242, CAMERA_ANTIBANDING_50HZ }, // Norway
+ { 244, CAMERA_ANTIBANDING_50HZ }, // Finland
+ { 246, CAMERA_ANTIBANDING_50HZ }, // Lithuania
+ { 247, CAMERA_ANTIBANDING_50HZ }, // Latvia
+ { 248, CAMERA_ANTIBANDING_50HZ }, // Estonia
+ { 250, CAMERA_ANTIBANDING_50HZ }, // Russian Federation
+ { 255, CAMERA_ANTIBANDING_50HZ }, // Ukraine
+ { 257, CAMERA_ANTIBANDING_50HZ }, // Belarus
+ { 259, CAMERA_ANTIBANDING_50HZ }, // Moldova
+ { 260, CAMERA_ANTIBANDING_50HZ }, // Poland
+ { 262, CAMERA_ANTIBANDING_50HZ }, // Germany
+ { 266, CAMERA_ANTIBANDING_50HZ }, // Gibraltar
+ { 268, CAMERA_ANTIBANDING_50HZ }, // Portugal
+ { 270, CAMERA_ANTIBANDING_50HZ }, // Luxembourg
+ { 272, CAMERA_ANTIBANDING_50HZ }, // Ireland
+ { 274, CAMERA_ANTIBANDING_50HZ }, // Iceland
+ { 276, CAMERA_ANTIBANDING_50HZ }, // Albania
+ { 278, CAMERA_ANTIBANDING_50HZ }, // Malta
+ { 280, CAMERA_ANTIBANDING_50HZ }, // Cyprus
+ { 282, CAMERA_ANTIBANDING_50HZ }, // Georgia
+ { 283, CAMERA_ANTIBANDING_50HZ }, // Armenia
+ { 284, CAMERA_ANTIBANDING_50HZ }, // Bulgaria
+ { 286, CAMERA_ANTIBANDING_50HZ }, // Turkey
+ { 288, CAMERA_ANTIBANDING_50HZ }, // Faroe Islands
+ { 290, CAMERA_ANTIBANDING_50HZ }, // Greenland
+ { 293, CAMERA_ANTIBANDING_50HZ }, // Slovenia
+ { 294, CAMERA_ANTIBANDING_50HZ }, // Macedonia
+ { 295, CAMERA_ANTIBANDING_50HZ }, // Liechtenstein
+ { 297, CAMERA_ANTIBANDING_50HZ }, // Montenegro
+ { 302, CAMERA_ANTIBANDING_60HZ }, // Canada
+ { 310, CAMERA_ANTIBANDING_60HZ }, // United States of America
+ { 311, CAMERA_ANTIBANDING_60HZ }, // United States of America
+ { 312, CAMERA_ANTIBANDING_60HZ }, // United States of America
+ { 313, CAMERA_ANTIBANDING_60HZ }, // United States of America
+ { 314, CAMERA_ANTIBANDING_60HZ }, // United States of America
+ { 315, CAMERA_ANTIBANDING_60HZ }, // United States of America
+ { 316, CAMERA_ANTIBANDING_60HZ }, // United States of America
+ { 330, CAMERA_ANTIBANDING_60HZ }, // Puerto Rico
+ { 334, CAMERA_ANTIBANDING_60HZ }, // Mexico
+ { 338, CAMERA_ANTIBANDING_50HZ }, // Jamaica
+ { 340, CAMERA_ANTIBANDING_50HZ }, // Martinique
+ { 342, CAMERA_ANTIBANDING_50HZ }, // Barbados
+ { 346, CAMERA_ANTIBANDING_60HZ }, // Cayman Islands
+ { 350, CAMERA_ANTIBANDING_60HZ }, // Bermuda
+ { 352, CAMERA_ANTIBANDING_50HZ }, // Grenada
+ { 354, CAMERA_ANTIBANDING_60HZ }, // Montserrat
+ { 362, CAMERA_ANTIBANDING_50HZ }, // Netherlands Antilles
+ { 363, CAMERA_ANTIBANDING_60HZ }, // Aruba
+ { 364, CAMERA_ANTIBANDING_60HZ }, // Bahamas
+ { 365, CAMERA_ANTIBANDING_60HZ }, // Anguilla
+ { 366, CAMERA_ANTIBANDING_50HZ }, // Dominica
+ { 368, CAMERA_ANTIBANDING_60HZ }, // Cuba
+ { 370, CAMERA_ANTIBANDING_60HZ }, // Dominican Republic
+ { 372, CAMERA_ANTIBANDING_60HZ }, // Haiti
+ { 401, CAMERA_ANTIBANDING_50HZ }, // Kazakhstan
+ { 402, CAMERA_ANTIBANDING_50HZ }, // Bhutan
+ { 404, CAMERA_ANTIBANDING_50HZ }, // India
+ { 405, CAMERA_ANTIBANDING_50HZ }, // India
+ { 410, CAMERA_ANTIBANDING_50HZ }, // Pakistan
+ { 413, CAMERA_ANTIBANDING_50HZ }, // Sri Lanka
+ { 414, CAMERA_ANTIBANDING_50HZ }, // Myanmar
+ { 415, CAMERA_ANTIBANDING_50HZ }, // Lebanon
+ { 416, CAMERA_ANTIBANDING_50HZ }, // Jordan
+ { 417, CAMERA_ANTIBANDING_50HZ }, // Syria
+ { 418, CAMERA_ANTIBANDING_50HZ }, // Iraq
+ { 419, CAMERA_ANTIBANDING_50HZ }, // Kuwait
+ { 420, CAMERA_ANTIBANDING_60HZ }, // Saudi Arabia
+ { 421, CAMERA_ANTIBANDING_50HZ }, // Yemen
+ { 422, CAMERA_ANTIBANDING_50HZ }, // Oman
+ { 424, CAMERA_ANTIBANDING_50HZ }, // United Arab Emirates
+ { 425, CAMERA_ANTIBANDING_50HZ }, // Israel
+ { 426, CAMERA_ANTIBANDING_50HZ }, // Bahrain
+ { 427, CAMERA_ANTIBANDING_50HZ }, // Qatar
+ { 428, CAMERA_ANTIBANDING_50HZ }, // Mongolia
+ { 429, CAMERA_ANTIBANDING_50HZ }, // Nepal
+ { 430, CAMERA_ANTIBANDING_50HZ }, // United Arab Emirates
+ { 431, CAMERA_ANTIBANDING_50HZ }, // United Arab Emirates
+ { 432, CAMERA_ANTIBANDING_50HZ }, // Iran
+ { 434, CAMERA_ANTIBANDING_50HZ }, // Uzbekistan
+ { 436, CAMERA_ANTIBANDING_50HZ }, // Tajikistan
+ { 437, CAMERA_ANTIBANDING_50HZ }, // Kyrgyz Rep
+ { 438, CAMERA_ANTIBANDING_50HZ }, // Turkmenistan
+ { 440, CAMERA_ANTIBANDING_60HZ }, // Japan
+ { 441, CAMERA_ANTIBANDING_60HZ }, // Japan
+ { 452, CAMERA_ANTIBANDING_50HZ }, // Vietnam
+ { 454, CAMERA_ANTIBANDING_50HZ }, // Hong Kong
+ { 455, CAMERA_ANTIBANDING_50HZ }, // Macao
+ { 456, CAMERA_ANTIBANDING_50HZ }, // Cambodia
+ { 457, CAMERA_ANTIBANDING_50HZ }, // Laos
+ { 460, CAMERA_ANTIBANDING_50HZ }, // China
+ { 466, CAMERA_ANTIBANDING_60HZ }, // Taiwan
+ { 470, CAMERA_ANTIBANDING_50HZ }, // Bangladesh
+ { 472, CAMERA_ANTIBANDING_50HZ }, // Maldives
+ { 502, CAMERA_ANTIBANDING_50HZ }, // Malaysia
+ { 505, CAMERA_ANTIBANDING_50HZ }, // Australia
+ { 510, CAMERA_ANTIBANDING_50HZ }, // Indonesia
+ { 514, CAMERA_ANTIBANDING_50HZ }, // East Timor
+ { 515, CAMERA_ANTIBANDING_60HZ }, // Philippines
+ { 520, CAMERA_ANTIBANDING_50HZ }, // Thailand
+ { 525, CAMERA_ANTIBANDING_50HZ }, // Singapore
+ { 530, CAMERA_ANTIBANDING_50HZ }, // New Zealand
+ { 535, CAMERA_ANTIBANDING_60HZ }, // Guam
+ { 536, CAMERA_ANTIBANDING_50HZ }, // Nauru
+ { 537, CAMERA_ANTIBANDING_50HZ }, // Papua New Guinea
+ { 539, CAMERA_ANTIBANDING_50HZ }, // Tonga
+ { 541, CAMERA_ANTIBANDING_50HZ }, // Vanuatu
+ { 542, CAMERA_ANTIBANDING_50HZ }, // Fiji
+ { 544, CAMERA_ANTIBANDING_60HZ }, // American Samoa
+ { 545, CAMERA_ANTIBANDING_50HZ }, // Kiribati
+ { 546, CAMERA_ANTIBANDING_50HZ }, // New Caledonia
+ { 548, CAMERA_ANTIBANDING_50HZ }, // Cook Islands
+ { 602, CAMERA_ANTIBANDING_50HZ }, // Egypt
+ { 603, CAMERA_ANTIBANDING_50HZ }, // Algeria
+ { 604, CAMERA_ANTIBANDING_50HZ }, // Morocco
+ { 605, CAMERA_ANTIBANDING_50HZ }, // Tunisia
+ { 606, CAMERA_ANTIBANDING_50HZ }, // Libya
+ { 607, CAMERA_ANTIBANDING_50HZ }, // Gambia
+ { 608, CAMERA_ANTIBANDING_50HZ }, // Senegal
+ { 609, CAMERA_ANTIBANDING_50HZ }, // Mauritania
+ { 610, CAMERA_ANTIBANDING_50HZ }, // Mali
+ { 611, CAMERA_ANTIBANDING_50HZ }, // Guinea
+ { 613, CAMERA_ANTIBANDING_50HZ }, // Burkina Faso
+ { 614, CAMERA_ANTIBANDING_50HZ }, // Niger
+ { 616, CAMERA_ANTIBANDING_50HZ }, // Benin
+ { 617, CAMERA_ANTIBANDING_50HZ }, // Mauritius
+ { 618, CAMERA_ANTIBANDING_50HZ }, // Liberia
+ { 619, CAMERA_ANTIBANDING_50HZ }, // Sierra Leone
+ { 620, CAMERA_ANTIBANDING_50HZ }, // Ghana
+ { 621, CAMERA_ANTIBANDING_50HZ }, // Nigeria
+ { 622, CAMERA_ANTIBANDING_50HZ }, // Chad
+ { 623, CAMERA_ANTIBANDING_50HZ }, // Central African Republic
+ { 624, CAMERA_ANTIBANDING_50HZ }, // Cameroon
+ { 625, CAMERA_ANTIBANDING_50HZ }, // Cape Verde
+ { 627, CAMERA_ANTIBANDING_50HZ }, // Equatorial Guinea
+ { 631, CAMERA_ANTIBANDING_50HZ }, // Angola
+ { 633, CAMERA_ANTIBANDING_50HZ }, // Seychelles
+ { 634, CAMERA_ANTIBANDING_50HZ }, // Sudan
+ { 636, CAMERA_ANTIBANDING_50HZ }, // Ethiopia
+ { 637, CAMERA_ANTIBANDING_50HZ }, // Somalia
+ { 638, CAMERA_ANTIBANDING_50HZ }, // Djibouti
+ { 639, CAMERA_ANTIBANDING_50HZ }, // Kenya
+ { 640, CAMERA_ANTIBANDING_50HZ }, // Tanzania
+ { 641, CAMERA_ANTIBANDING_50HZ }, // Uganda
+ { 642, CAMERA_ANTIBANDING_50HZ }, // Burundi
+ { 643, CAMERA_ANTIBANDING_50HZ }, // Mozambique
+ { 645, CAMERA_ANTIBANDING_50HZ }, // Zambia
+ { 646, CAMERA_ANTIBANDING_50HZ }, // Madagascar
+ { 647, CAMERA_ANTIBANDING_50HZ }, // France
+ { 648, CAMERA_ANTIBANDING_50HZ }, // Zimbabwe
+ { 649, CAMERA_ANTIBANDING_50HZ }, // Namibia
+ { 650, CAMERA_ANTIBANDING_50HZ }, // Malawi
+ { 651, CAMERA_ANTIBANDING_50HZ }, // Lesotho
+ { 652, CAMERA_ANTIBANDING_50HZ }, // Botswana
+ { 653, CAMERA_ANTIBANDING_50HZ }, // Swaziland
+ { 654, CAMERA_ANTIBANDING_50HZ }, // Comoros
+ { 655, CAMERA_ANTIBANDING_50HZ }, // South Africa
+ { 657, CAMERA_ANTIBANDING_50HZ }, // Eritrea
+ { 702, CAMERA_ANTIBANDING_60HZ }, // Belize
+ { 704, CAMERA_ANTIBANDING_60HZ }, // Guatemala
+ { 706, CAMERA_ANTIBANDING_60HZ }, // El Salvador
+ { 708, CAMERA_ANTIBANDING_60HZ }, // Honduras
+ { 710, CAMERA_ANTIBANDING_60HZ }, // Nicaragua
+ { 712, CAMERA_ANTIBANDING_60HZ }, // Costa Rica
+ { 714, CAMERA_ANTIBANDING_60HZ }, // Panama
+ { 722, CAMERA_ANTIBANDING_50HZ }, // Argentina
+ { 724, CAMERA_ANTIBANDING_60HZ }, // Brazil
+ { 730, CAMERA_ANTIBANDING_50HZ }, // Chile
+ { 732, CAMERA_ANTIBANDING_60HZ }, // Colombia
+ { 734, CAMERA_ANTIBANDING_60HZ }, // Venezuela
+ { 736, CAMERA_ANTIBANDING_50HZ }, // Bolivia
+ { 738, CAMERA_ANTIBANDING_60HZ }, // Guyana
+ { 740, CAMERA_ANTIBANDING_60HZ }, // Ecuador
+ { 742, CAMERA_ANTIBANDING_50HZ }, // French Guiana
+ { 744, CAMERA_ANTIBANDING_50HZ }, // Paraguay
+ { 746, CAMERA_ANTIBANDING_60HZ }, // Suriname
+ { 748, CAMERA_ANTIBANDING_50HZ }, // Uruguay
+ { 750, CAMERA_ANTIBANDING_50HZ }, // Falkland Islands
+};
+#define country_number (sizeof(country_numeric) / sizeof(country_map))
+/* Look up pre-sorted antibanding_type table by current MCC. */
+static camera_antibanding_type camera_get_location(void) {
+ char value[PROP_VALUE_MAX];
+ char country_value[PROP_VALUE_MAX];
+ uint32_t country_code;
+ memset(value, 0x00, sizeof(value));
+ memset(country_value, 0x00, sizeof(country_value));
+ if (!__system_property_get("gsm.operator.numeric", value)) {
+ return CAMERA_ANTIBANDING_60HZ;
+ }
+ memcpy(country_value, value, 3);
+ country_code = atoi(country_value);
+ ALOGD("value:%s, country value:%s, country code:%d\n",
+ value, country_value, country_code);
+ int left = 0;
+ int right = country_number - 1;
+ while (left <= right) {
+ int index = (left + right) >> 1;
+ if (country_numeric[index].country_code == country_code)
+ return country_numeric[index].type;
+ else if (country_numeric[index].country_code > country_code)
+ right = index - 1;
+ else
+ left = index + 1;
+ }
+ return CAMERA_ANTIBANDING_60HZ;
+}
+#endif
+
+static const str_map scenemode[] = {
+ { QCameraParameters::SCENE_MODE_AUTO, CAMERA_BESTSHOT_OFF },
+ { QCameraParameters::SCENE_MODE_ASD, CAMERA_BESTSHOT_AUTO },
+ { QCameraParameters::SCENE_MODE_ACTION, CAMERA_BESTSHOT_ACTION },
+ { QCameraParameters::SCENE_MODE_PORTRAIT, CAMERA_BESTSHOT_PORTRAIT },
+ { QCameraParameters::SCENE_MODE_LANDSCAPE, CAMERA_BESTSHOT_LANDSCAPE },
+ { QCameraParameters::SCENE_MODE_NIGHT, CAMERA_BESTSHOT_NIGHT },
+ { QCameraParameters::SCENE_MODE_NIGHT_PORTRAIT, CAMERA_BESTSHOT_NIGHT_PORTRAIT },
+ { QCameraParameters::SCENE_MODE_THEATRE, CAMERA_BESTSHOT_THEATRE },
+ { QCameraParameters::SCENE_MODE_BEACH, CAMERA_BESTSHOT_BEACH },
+ { QCameraParameters::SCENE_MODE_SNOW, CAMERA_BESTSHOT_SNOW },
+ { QCameraParameters::SCENE_MODE_SUNSET, CAMERA_BESTSHOT_SUNSET },
+ { QCameraParameters::SCENE_MODE_STEADYPHOTO, CAMERA_BESTSHOT_ANTISHAKE },
+ { QCameraParameters::SCENE_MODE_FIREWORKS , CAMERA_BESTSHOT_FIREWORKS },
+ { QCameraParameters::SCENE_MODE_SPORTS , CAMERA_BESTSHOT_SPORTS },
+ { QCameraParameters::SCENE_MODE_PARTY, CAMERA_BESTSHOT_PARTY },
+ { QCameraParameters::SCENE_MODE_CANDLELIGHT, CAMERA_BESTSHOT_CANDLELIGHT },
+ { QCameraParameters::SCENE_MODE_BACKLIGHT, CAMERA_BESTSHOT_BACKLIGHT },
+ { QCameraParameters::SCENE_MODE_FLOWERS, CAMERA_BESTSHOT_FLOWERS },
+ { QCameraParameters::SCENE_MODE_AR, CAMERA_BESTSHOT_AR },
+};
+
+static const str_map scenedetect[] = {
+ { QCameraParameters::SCENE_DETECT_OFF, FALSE },
+ { QCameraParameters::SCENE_DETECT_ON, TRUE },
+};
+
+// from camera.h, led_mode_t
+static const str_map flash[] = {
+ { QCameraParameters::FLASH_MODE_OFF, LED_MODE_OFF },
+ { QCameraParameters::FLASH_MODE_AUTO, LED_MODE_AUTO },
+ { QCameraParameters::FLASH_MODE_ON, LED_MODE_ON },
+ { QCameraParameters::FLASH_MODE_TORCH, LED_MODE_TORCH}
+};
+
+// from mm-camera/common/camera.h.
+static const str_map iso[] = {
+ { QCameraParameters::ISO_AUTO, CAMERA_ISO_AUTO},
+ { QCameraParameters::ISO_HJR, CAMERA_ISO_DEBLUR},
+ { QCameraParameters::ISO_100, CAMERA_ISO_100},
+ { QCameraParameters::ISO_200, CAMERA_ISO_200},
+ { QCameraParameters::ISO_400, CAMERA_ISO_400},
+ { QCameraParameters::ISO_800, CAMERA_ISO_800 },
+ { QCameraParameters::ISO_1600, CAMERA_ISO_1600 }
+};
+
+static const str_map iso_3D[] = {
+ { QCameraParameters::ISO_AUTO, CAMERA_ISO_AUTO},
+ { QCameraParameters::ISO_100, CAMERA_ISO_100},
+ { QCameraParameters::ISO_200, CAMERA_ISO_200},
+ { QCameraParameters::ISO_400, CAMERA_ISO_400},
+ { QCameraParameters::ISO_800, CAMERA_ISO_800 },
+ { QCameraParameters::ISO_1600, CAMERA_ISO_1600 }
+};
+
+
+#define DONT_CARE AF_MODE_MAX
+static const str_map focus_modes[] = {
+ { QCameraParameters::FOCUS_MODE_AUTO, AF_MODE_AUTO},
+ { QCameraParameters::FOCUS_MODE_INFINITY, DONT_CARE },
+ { QCameraParameters::FOCUS_MODE_NORMAL, AF_MODE_NORMAL },
+ { QCameraParameters::FOCUS_MODE_MACRO, AF_MODE_MACRO },
+ { QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE, AF_MODE_CAF },
+ { QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO, DONT_CARE }
+};
+
+static const str_map lensshade[] = {
+ { QCameraParameters::LENSSHADE_ENABLE, TRUE },
+ { QCameraParameters::LENSSHADE_DISABLE, FALSE }
+};
+
+static const str_map hfr[] = {
+ { QCameraParameters::VIDEO_HFR_OFF, CAMERA_HFR_MODE_OFF },
+ { QCameraParameters::VIDEO_HFR_2X, CAMERA_HFR_MODE_60FPS },
+ { QCameraParameters::VIDEO_HFR_3X, CAMERA_HFR_MODE_90FPS },
+ { QCameraParameters::VIDEO_HFR_4X, CAMERA_HFR_MODE_120FPS },
+};
+
+static const str_map mce[] = {
+ { QCameraParameters::MCE_ENABLE, TRUE },
+ { QCameraParameters::MCE_DISABLE, FALSE }
+};
+
+static const str_map hdr[] = {
+ { QCameraParameters::HDR_ENABLE, TRUE },
+ { QCameraParameters::HDR_DISABLE, FALSE }
+};
+
+static const str_map histogram[] = {
+ { QCameraParameters::HISTOGRAM_ENABLE, TRUE },
+ { QCameraParameters::HISTOGRAM_DISABLE, FALSE }
+};
+
+static const str_map skinToneEnhancement[] = {
+ { QCameraParameters::SKIN_TONE_ENHANCEMENT_ENABLE, TRUE },
+ { QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE, FALSE }
+};
+
+static const str_map denoise[] = {
+ { QCameraParameters::DENOISE_OFF, FALSE },
+ { QCameraParameters::DENOISE_ON, TRUE }
+};
+
+static const str_map selectable_zone_af[] = {
+ { QCameraParameters::SELECTABLE_ZONE_AF_AUTO, AUTO },
+ { QCameraParameters::SELECTABLE_ZONE_AF_SPOT_METERING, SPOT },
+ { QCameraParameters::SELECTABLE_ZONE_AF_CENTER_WEIGHTED, CENTER_WEIGHTED },
+ { QCameraParameters::SELECTABLE_ZONE_AF_FRAME_AVERAGE, AVERAGE }
+};
+
+static const str_map facedetection[] = {
+ { QCameraParameters::FACE_DETECTION_OFF, FALSE },
+ { QCameraParameters::FACE_DETECTION_ON, TRUE }
+};
+
+#define DONT_CARE_COORDINATE -1
+static const str_map touchafaec[] = {
+ { QCameraParameters::TOUCH_AF_AEC_OFF, FALSE },
+ { QCameraParameters::TOUCH_AF_AEC_ON, TRUE }
+};
+
+static const str_map redeye_reduction[] = {
+ { QCameraParameters::REDEYE_REDUCTION_ENABLE, TRUE },
+ { QCameraParameters::REDEYE_REDUCTION_DISABLE, FALSE }
+};
+
+static const str_map zsl_modes[] = {
+ { QCameraParameters::ZSL_OFF, FALSE },
+ { QCameraParameters::ZSL_ON, TRUE },
+};
+
+/*
+ * Values based on aec.c
+ */
+#define DONT_CARE_COORDINATE -1
+#define CAMERA_HISTOGRAM_ENABLE 1
+#define CAMERA_HISTOGRAM_DISABLE 0
+#define HISTOGRAM_STATS_SIZE 257
+
+/*
+ * Values based on aec.c
+ */
+#define EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR 12
+#define EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR -12
+#define EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR 0
+#define EXPOSURE_COMPENSATION_DENOMINATOR 6
+#define EXPOSURE_COMPENSATION_STEP ((float (1))/EXPOSURE_COMPENSATION_DENOMINATOR)
+
// Snapshot output formats: JPEG always, RAW only on the non-ZSL path.
static const str_map picture_formats[] = {
    {QCameraParameters::PIXEL_FORMAT_JPEG, PICTURE_FORMAT_JPEG},
    {QCameraParameters::PIXEL_FORMAT_RAW, PICTURE_FORMAT_RAW}
};

// Boolean recording-hint parameter string -> TRUE/FALSE.
static const str_map recording_Hints[] = {
    {"false", FALSE},
    {"true", TRUE}
};

// ZSL snapshots support JPEG only (no RAW entry).
static const str_map picture_formats_zsl[] = {
    {QCameraParameters::PIXEL_FORMAT_JPEG, PICTURE_FORMAT_JPEG}
};

static const str_map frame_rate_modes[] = {
    {QCameraParameters::KEY_PREVIEW_FRAME_RATE_AUTO_MODE, FPS_MODE_AUTO},
    {QCameraParameters::KEY_PREVIEW_FRAME_RATE_FIXED_MODE, FPS_MODE_FIXED}
};

// Currently selected preview format (a CAMERA_YUV_* value).
static int mPreviewFormat;
// Full preview-format table, including the Adreno-tiled NV21 variant.
static const str_map preview_formats[] = {
    {QCameraParameters::PIXEL_FORMAT_YUV420SP, CAMERA_YUV_420_NV21},
    {QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO, CAMERA_YUV_420_NV21_ADRENO},
    {QCameraParameters::PIXEL_FORMAT_YUV420P, CAMERA_YUV_420_YV12}
};
// Reduced table (no Adreno format) advertised on 7627/7627A targets.
static const str_map preview_formats1[] = {
    {QCameraParameters::PIXEL_FORMAT_YUV420SP, CAMERA_YUV_420_NV21},
    {QCameraParameters::PIXEL_FORMAT_YUV420P, CAMERA_YUV_420_YV12}
};

// Parameter strings -> HAL pixel formats handed to the preview window.
static const str_map app_preview_formats[] = {
{QCameraParameters::PIXEL_FORMAT_YUV420SP, HAL_PIXEL_FORMAT_YCrCb_420_SP}, //nv21
//{QCameraParameters::PIXEL_FORMAT_YUV420SP_ADRENO, HAL_PIXEL_FORMAT_YCrCb_420_SP_ADRENO}, //nv21_adreno
{QCameraParameters::PIXEL_FORMAT_YUV420P, HAL_PIXEL_FORMAT_YV12}, //YV12
};
+
+
// Supported-values strings built once per mediaserver process; construction
// in initDefaultParameters() is guarded by parameter_string_initialized.
static bool parameter_string_initialized = false;
static String8 preview_size_values;
static String8 hfr_size_values;
static String8 picture_size_values;
static String8 fps_ranges_supported_values;
static String8 jpeg_thumbnail_size_values;
static String8 antibanding_values;
static String8 effect_values;
static String8 autoexposure_values;
static String8 whitebalance_values;
static String8 flash_values;
static String8 focus_mode_values;
static String8 iso_values;
static String8 lensshade_values;
static String8 mce_values;
static String8 hdr_values;
static String8 histogram_values;
static String8 skinToneEnhancement_values;
static String8 touchafaec_values;
static String8 picture_format_values;
static String8 scenemode_values;
static String8 denoise_values;
static String8 zoom_ratio_values;
static String8 preview_frame_rate_values;
static String8 frame_rate_mode_values;
static String8 scenedetect_values;
static String8 preview_format_values;
static String8 selectable_zone_af_values;
static String8 facedetection_values;
static String8 hfr_values;
static String8 redeye_reduction_values;
static String8 zsl_values;

// Callback tables and scratch parameter blocks shared with the mm-camera
// layer (mCamNotify/mCamOps are filled in during mm_camera_init).
mm_camera_notify mCamNotify;
mm_camera_ops mCamOps;
static mm_camera_buffer_t mEncodeOutputBuffer[MAX_SNAPSHOT_BUFFERS];
static encode_params_t mImageEncodeParms;
static capture_params_t mImageCaptureParms;
static raw_capture_params_t mRawCaptureParms;
static zsl_capture_params_t mZslCaptureParms;
static zsl_params_t mZslParms;
static yv12_format_parms_t myv12_params;
+
+static String8 create_sizes_str(const camera_size_type *sizes, int len) {
+ String8 str;
+ char buffer[32];
+
+ if (len > 0) {
+ sprintf(buffer, "%dx%d", sizes[0].width, sizes[0].height);
+ str.append(buffer);
+ }
+ for (int i = 1; i < len; i++) {
+ sprintf(buffer, ",%dx%d", sizes[i].width, sizes[i].height);
+ str.append(buffer);
+ }
+ return str;
+}
+
+static String8 create_fps_str(const android:: FPSRange* fps, int len) {
+ String8 str;
+ char buffer[32];
+
+ if (len > 0) {
+ sprintf(buffer, "(%d,%d)", fps[0].minFPS, fps[0].maxFPS);
+ str.append(buffer);
+ }
+ for (int i = 1; i < len; i++) {
+ sprintf(buffer, ",(%d,%d)", fps[i].minFPS, fps[i].maxFPS);
+ str.append(buffer);
+ }
+ return str;
+}
+
+static String8 create_values_str(const str_map *values, int len) {
+ String8 str;
+
+ if (len > 0) {
+ str.append(values[0].desc);
+ }
+ for (int i = 1; i < len; i++) {
+ str.append(",");
+ str.append(values[i].desc);
+ }
+ return str;
+}
+
+
+static String8 create_str(int16_t *arr, int length){
+ String8 str;
+ char buffer[32];
+
+ if(length > 0){
+ snprintf(buffer, sizeof(buffer), "%d", arr[0]);
+ str.append(buffer);
+ }
+
+ for (int i =1;i<length;i++){
+ snprintf(buffer, sizeof(buffer), ",%d",arr[i]);
+ str.append(buffer);
+ }
+ return str;
+}
+
+static String8 create_values_range_str(int min, int max){
+ String8 str;
+ char buffer[32];
+
+ if(min <= max){
+ snprintf(buffer, sizeof(buffer), "%d", min);
+ str.append(buffer);
+
+ for (int i = min + 1; i <= max; i++) {
+ snprintf(buffer, sizeof(buffer), ",%d", i);
+ str.append(buffer);
+ }
+ }
+ return str;
+}
+
extern "C" {
//------------------------------------------------------------------------
// : 720p busyQ functions
// --------------------------------------------------------------------
// Queue of video frames currently owned by the encoder path. Guarded by
// its embedded mutex; producers signal `wait` after enqueuing (see
// cam_frame_post_video below).
static struct fifo_queue g_busy_frame_queue =
    {0, 0, 0, PTHREAD_MUTEX_INITIALIZER, PTHREAD_COND_INITIALIZER, (char *)"video_busy_q"};
};
+
+static void cam_frame_wait_video (void)
+{
+ ALOGV("cam_frame_wait_video E ");
+ if ((g_busy_frame_queue.num_of_frames) <=0){
+ pthread_cond_wait(&(g_busy_frame_queue.wait), &(g_busy_frame_queue.mut));
+ }
+ ALOGV("cam_frame_wait_video X");
+ return;
+}
+
/* Drop every node currently in the video busy queue, under the queue
 * mutex. Only the fifo_node wrappers are freed here; the msm_frame buffers
 * they point to are owned elsewhere (presumably the record heap — confirm
 * at the call sites). */
void cam_frame_flush_video (void)
{
    ALOGV("cam_frame_flush_video: in n = %d\n", g_busy_frame_queue.num_of_frames);
    pthread_mutex_lock(&(g_busy_frame_queue.mut));

    while (g_busy_frame_queue.front)
    {
        //dequeue from the busy queue
        struct fifo_node *node = dequeue (&g_busy_frame_queue);
        if(node)
            free(node);

        ALOGV("cam_frame_flush_video: node \n");
    }
    pthread_mutex_unlock(&(g_busy_frame_queue.mut));
    ALOGV("cam_frame_flush_video: out n = %d\n", g_busy_frame_queue.num_of_frames);
    return ;
}
+
+static struct msm_frame * cam_frame_get_video()
+{
+ struct msm_frame *p = NULL;
+ ALOGV("cam_frame_get_video... in\n");
+ ALOGV("cam_frame_get_video... got lock\n");
+ if (g_busy_frame_queue.front)
+ {
+ //dequeue
+ struct fifo_node *node = dequeue (&g_busy_frame_queue);
+ if (node)
+ {
+ p = (struct msm_frame *)node->f;
+ free (node);
+ }
+ ALOGV("cam_frame_get_video... out = %x\n", p->buffer);
+ }
+ return p;
+}
+
+// Parse string like "(1, 2, 3, 4, ..., N)"
+// num is pointer to an allocated array of size N
+static int parseNDimVector_HAL(const char *str, int *num, int N, char delim = ',')
+{
+ char *start, *end;
+ if(num == NULL) {
+ ALOGE("Invalid output array (num == NULL)");
+ return -1;
+ }
+ //check if string starts and ends with parantheses
+ if(str[0] != '(' || str[strlen(str)-1] != ')') {
+ ALOGE("Invalid format of string %s, valid format is (n1, n2, n3, n4 ...)", str);
+ return -1;
+ }
+ start = (char*) str;
+ start++;
+ for(int i=0; i<N; i++) {
+ *(num+i) = (int) strtol(start, &end, 10);
+ if(*end != delim && i < N-1) {
+ ALOGE("Cannot find delimeter '%c' in string \"%s\". end = %c", delim, str, *end);
+ return -1;
+ }
+ start = end+1;
+ }
+ return 0;
+}
/* Count occurrences of character `ch` in the NUL-terminated string `str`. */
static int countChar(const char *str , char ch )
{
    int hits = 0;

    for (const char *p = str; *p != '\0'; ++p) {
        if (*p == ch)
            ++hits;
    }
    return hits;
}
+int checkAreaParameters(const char *str)
+{
+ int areaValues[6];
+ int left, right, top, bottom, weight;
+
+ if(countChar(str, ',') > 4) {
+ ALOGE("%s: No of area parameters exceeding the expected number %s", __FUNCTION__, str);
+ return -1;
+ }
+
+ if(parseNDimVector_HAL(str, areaValues, 5) !=0) {
+ ALOGE("%s: Failed to parse the input string %s", __FUNCTION__, str);
+ return -1;
+ }
+
+ ALOGV("%s: Area values are %d,%d,%d,%d,%d", __FUNCTION__,
+ areaValues[0], areaValues[1], areaValues[2], areaValues[3], areaValues[4]);
+
+ left = areaValues[0];
+ top = areaValues[1];
+ right = areaValues[2];
+ bottom = areaValues[3];
+ weight = areaValues[4];
+
+ // left should >= -1000
+ if (!(left >= -1000))
+ return -1;
+ // top should >= -1000
+ if(!(top >= -1000))
+ return -1;
+ // right should <= 1000
+ if(!(right <= 1000))
+ return -1;
+ // bottom should <= 1000
+ if(!(bottom <= 1000))
+ return -1;
+ // weight should >= 1
+ // weight should <= 1000
+ if(!((1 <= weight) && (weight <= 1000)))
+ return -1;
+ // left should < right
+ if(!(left < right))
+ return -1;
+ // top should < bottom
+ if(!(top < bottom))
+ return -1;
+
+ return 0;
+}
+
/* Hand a recording frame to the video busy queue. Allocates a list node,
 * appends it under the queue mutex, then signals any thread blocked on the
 * queue's condvar (cam_frame_wait_video). On allocation failure the frame
 * is dropped with a log — not fatal. A NULL frame is rejected up front. */
static void cam_frame_post_video (struct msm_frame *p)
{
    if (!p)
    {
        ALOGE("post video , buffer is null");
        return;
    }
    ALOGV("cam_frame_post_video... in = %x\n", (unsigned int)(p->buffer));
    pthread_mutex_lock(&(g_busy_frame_queue.mut));
    ALOGV("post_video got lock. q count before enQ %d", g_busy_frame_queue.num_of_frames);
    //enqueue to busy queue
    struct fifo_node *node = (struct fifo_node *)malloc (sizeof (struct fifo_node));
    if (node)
    {
        ALOGV(" post video , enqueing in busy queue");
        node->f = p;
        node->next = NULL;
        enqueue (&g_busy_frame_queue, node);
        ALOGV("post_video got lock. q count after enQ %d", g_busy_frame_queue.num_of_frames);
    }
    else
    {
        ALOGE("cam_frame_post_video error... out of memory\n");
    }

    pthread_mutex_unlock(&(g_busy_frame_queue.mut));
    // Signaling after unlock is valid with POSIX condvars; the waiter
    // re-acquires the mutex inside pthread_cond_wait.
    pthread_cond_signal(&(g_busy_frame_queue.wait));

    ALOGV("cam_frame_post_video... out = %x\n", p->buffer);

    return;
}
+
+QualcommCameraHardware::FrameQueue::FrameQueue(){
+ mInitialized = false;
+}
+
// Drop any queued frame pointers on destruction. flush() only clears the
// container; the frames themselves are not freed here.
QualcommCameraHardware::FrameQueue::~FrameQueue(){
    flush();
}
+
// Mark the queue usable and wake any thread blocked in get().
void QualcommCameraHardware::FrameQueue::init(){
    Mutex::Autolock l(&mQueueLock);
    mInitialized = true;
    mQueueWait.signal();
}
+
// Mark the queue unusable and wake blocked consumers so get() can return
// NULL instead of waiting forever (used at shutdown).
void QualcommCameraHardware::FrameQueue::deinit(){
    Mutex::Autolock l(&mQueueLock);
    mInitialized = false;
    mQueueWait.signal();
}
+
// Thread-safe read of the initialized flag.
bool QualcommCameraHardware::FrameQueue::isInitialized(){
    Mutex::Autolock l(&mQueueLock);
    return mInitialized;
}
+
+bool QualcommCameraHardware::FrameQueue::add(
+ struct msm_frame * element){
+ Mutex::Autolock l(&mQueueLock);
+ if(mInitialized == false)
+ return false;
+
+ mContainer.add(element);
+ mQueueWait.signal();
+ return true;
+}
+
/* Block until a frame is available and pop the oldest one. Returns NULL
 * when deinit() wakes the waiter — the mechanism that releases consumers
 * at shutdown. */
struct msm_frame * QualcommCameraHardware::FrameQueue::get(){

    struct msm_frame *frame;
    mQueueLock.lock();
    // Sleep while initialized but empty; add() and deinit() both signal.
    while(mInitialized && mContainer.isEmpty()){
        mQueueWait.wait(mQueueLock);
    }

    if(!mInitialized){
        mQueueLock.unlock();
        return NULL;
    }

    // FIFO order: take the element at index 0.
    frame = mContainer.itemAt(0);
    mContainer.removeAt(0);
    mQueueLock.unlock();
    return frame;
}
+
// Discard all queued frame pointers (the frames are not freed here).
void QualcommCameraHardware::FrameQueue::flush(){
    Mutex::Autolock l(&mQueueLock);
    mContainer.clear();

}
+
+
+void QualcommCameraHardware::storeTargetType(void) {
+ char mDeviceName[PROPERTY_VALUE_MAX];
+ property_get("ro.product.device",mDeviceName," ");
+ mCurrentTarget = TARGET_MAX;
+ for( int i = 0; i < TARGET_MAX ; i++) {
+ if( !strncmp(mDeviceName, targetList[i].targetStr, 7)) {
+ mCurrentTarget = targetList[i].targetEnum;
+ if(mCurrentTarget == TARGET_MSM7625) {
+ if(!strncmp(mDeviceName, "msm7625a" , 8))
+ mCurrentTarget = TARGET_MSM7625A;
+ }
+ if(mCurrentTarget == TARGET_MSM7627) {
+ if(!strncmp(mDeviceName, "msm7627a" , 8))
+ mCurrentTarget = TARGET_MSM7627A;
+ }
+ break;
+ }
+ }
+ ALOGV(" Storing the current target type as %d ", mCurrentTarget );
+ return;
+}
+
+void *openCamera(void *data) {
+ ALOGV(" openCamera : E");
+ mCameraOpen = false;
+
+ if (!libmmcamera) {
+ ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+ return false;
+ }
+
+ *(void **)&LINK_mm_camera_init =
+ ::dlsym(libmmcamera, "mm_camera_init");
+
+ *(void **)&LINK_mm_camera_exec =
+ ::dlsym(libmmcamera, "mm_camera_exec");
+
+ *(void **)&LINK_mm_camera_deinit =
+ ::dlsym(libmmcamera, "mm_camera_deinit");
+
+
+ if (MM_CAMERA_SUCCESS != LINK_mm_camera_init(&mCfgControl, &mCamNotify, &mCamOps, 0)) {
+ ALOGE("startCamera: mm_camera_init failed:");
+ return false;
+ //pthread_exit((void*) ret_val);
+ }
+
+ uint8_t camera_id8 = (uint8_t)HAL_currentCameraId;
+ if (MM_CAMERA_SUCCESS != mCfgControl.mm_camera_set_parm(CAMERA_PARM_CAMERA_ID, &camera_id8)) {
+ ALOGE("setting camera id failed");
+ LINK_mm_camera_deinit();
+ return false;
+ //pthread_exit((void*) ret_val);
+ }
+
+ //camera_mode_t mode = (camera_mode_t)HAL_currentCameraMode;
+ camera_mode_t mode = CAMERA_MODE_2D;
+ if (MM_CAMERA_SUCCESS != mCfgControl.mm_camera_set_parm(CAMERA_PARM_MODE, &mode)) {
+ ALOGE("startCamera: CAMERA_PARM_MODE failed:");
+ LINK_mm_camera_deinit();
+ return false;
+ //pthread_exit((void*) ret_val);
+ }
+
+ if (MM_CAMERA_SUCCESS != LINK_mm_camera_exec()) {
+ ALOGE("startCamera: mm_camera_exec failed:");
+ return false;
+ //pthread_exit((void*) ret_val);
+ }
+ mCameraOpen = true;
+ ALOGV(" openCamera : X");
+ if (CAMERA_MODE_3D == mode) {
+ camera_3d_frame_t snapshotFrame;
+ snapshotFrame.frame_type = CAM_SNAPSHOT_FRAME;
+ if(MM_CAMERA_SUCCESS !=
+ mCfgControl.mm_camera_get_parm(CAMERA_PARM_3D_FRAME_FORMAT,
+ (void *)&snapshotFrame)){
+ ALOGE("%s: get 3D format failed", __func__);
+ LINK_mm_camera_deinit();
+ return false;
+ //pthread_exit((void*) ret_val);
+ }
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->mSnapshot3DFormat = snapshotFrame.format;
+ ALOGI("%s: 3d format snapshot %d", __func__, obj->mSnapshot3DFormat);
+ }
+ }
+
+ ALOGV("openCamera : X");
+// pthread_exit((void*) ret_val);
+
+ return NULL;
+}
+//-------------------------------------------------------------------------------------
+static Mutex singleton_lock;
+static bool singleton_releasing;
+static nsecs_t singleton_releasing_start_time;
+static const nsecs_t SINGLETON_RELEASING_WAIT_TIME = seconds_to_nanoseconds(5);
+static const nsecs_t SINGLETON_RELEASING_RECHECK_TIMEOUT = seconds_to_nanoseconds(1);
+static Condition singleton_wait;
+
+static void receive_camframe_callback(struct msm_frame *frame);
+static void receive_liveshot_callback(liveshot_status status, uint32_t jpeg_size);
+static void receive_camstats_callback(camstats_type stype, camera_preview_histogram_info* histinfo);
+static void receive_camframe_video_callback(struct msm_frame *frame); // 720p
+static int8_t receive_event_callback(mm_camera_event* event);
+static void receive_shutter_callback(common_crop_t *crop);
+static void receive_camframe_error_callback(camera_error_type err);
+static int fb_fd = -1;
+static int32_t mMaxZoom = 0;
+static bool zoomSupported = false;
+static int dstOffset = 0;
+
+static int16_t * zoomRatios;
+
+
+/* When using MDP zoom, double the preview buffers. The usage of these
+ * buffers is as follows:
 * 1. As all the buffers come under a single FD, and at initial registration,
+ * this FD will be passed to surface flinger, surface flinger can have access
+ * to all the buffers when needed.
+ * 2. Only "kPreviewBufferCount" buffers (SrcSet) will be registered with the
+ * camera driver to receive preview frames. The remaining buffers (DstSet),
+ * will be used at HAL and by surface flinger only when crop information
+ * is present in the frame.
+ * 3. When there is no crop information, there will be no call to MDP zoom,
+ * and the buffers in SrcSet will be passed to surface flinger to display.
+ * 4. With crop information present, MDP zoom will be called, and the final
+ * data will be placed in a buffer from DstSet, and this buffer will be given
+ * to surface flinger to display.
+ */
+#define NUM_MORE_BUFS 2
+
/* Constructor: zeroes all state flags, loads the mm-camera library via
 * MMCameraDL, detects the board target, clears the mapped-buffer tables,
 * spawns the asynchronous openCamera thread, and sizes the preview/record
 * buffer pools and default frame formats per target. */
QualcommCameraHardware::QualcommCameraHardware()
    : mParameters(),
      mCameraRunning(false),
      mPreviewInitialized(false),
      mPreviewThreadRunning(false),
      mHFRThreadRunning(false),
      mFrameThreadRunning(false),
      mVideoThreadRunning(false),
      mSnapshotThreadRunning(false),
      mJpegThreadRunning(false),
      mSmoothzoomThreadRunning(false),
      mSmoothzoomThreadExit(false),
      mInSnapshotMode(false),
      mEncodePending(false),
      mBuffersInitialized(false),
      mSnapshotFormat(0),
      mFirstFrame(true),
      mReleasedRecordingFrame(false),
      mPreviewFrameSize(0),
      mRawSize(0),
      mCbCrOffsetRaw(0),
      mYOffset(0),
      mAutoFocusThreadRunning(false),
      mInitialized(false),
      mBrightness(0),
      mSkinToneEnhancement(0),
      mHJR(0),
      mInPreviewCallback(false),
      //mUseOverlay(0),
      mIs3DModeOn(0),
      //mOverlay(0),
      mMsgEnabled(0),
      mNotifyCallback(0),
      mDataCallback(0),
      mDataCallbackTimestamp(0),
      mCallbackCookie(0),
      mDebugFps(0),
      mSnapshotDone(0),
      maxSnapshotWidth(0),
      maxSnapshotHeight(0),
      mHasAutoFocusSupport(0),
      mDisEnabled(0),
      mRotation(0),
      mResetWindowCrop(false),
      mThumbnailWidth(0),
      mThumbnailHeight(0),
      strTexturesOn(false),
      mPictureWidth(0),
      mPictureHeight(0),
      mPostviewWidth(0),
      mPostviewHeight(0),
      mPreviewWindow(NULL),
      mTotalPreviewBufferCount(0),
      mZslFlashEnable(false),
      mZslPanorama(false),
      mSnapshotCancel(false),
      mHFRMode(false),
      mActualPictWidth(0),
      mActualPictHeight(0),
      mDenoiseValue(0),
      mPreviewStopping(false),
      mInHFRThread(false),
      mPrevHeapDeallocRunning(false),
      mHdrMode(false ),
      mExpBracketMode(false),
      mZslEnable(false),
      mStoreMetaDataInFrame(0),
      mRecordingState(0)
{
    ALOGI("QualcommCameraHardware constructor E");
    // Hold a reference to the dlopen'ed mm-camera library for our lifetime.
    mMMCameraDLRef = MMCameraDL::getInstance();
    libmmcamera = mMMCameraDLRef->pointer();
    char value[PROPERTY_VALUE_MAX];
    mCameraOpen = false;
    /*if(HAL_currentSnapshotMode == CAMERA_SNAPSHOT_ZSL) {
        ALOGI("%s: this is ZSL mode", __FUNCTION__);
        mZslEnable = true;
    }*/

    property_get("persist.camera.hal.multitouchaf", value, "0");
    mMultiTouch = atoi(value);

    // Detect the board target before any target-dependent sizing below.
    storeTargetType();
    // Clear every mapped-buffer slot so release paths can test for NULL.
    for(int i=0; i< MAX_SNAPSHOT_BUFFERS; i++) {
        mRawMapped[i] = NULL;
        mJpegMapped[i] = NULL;
        mThumbnailMapped[i] = NULL;
    }
    mRawSnapshotMapped = NULL;
    mJpegCopyMapped = NULL;
    for(int i=0; i< RECORD_BUFFERS; i++) {
        mRecordMapped[i] = NULL;
    }

    for(int i=0; i<3; i++)
        mStatsMapped[i] = NULL;

    mJpegLiveSnapMapped = NULL;
    if(HAL_currentCameraMode == CAMERA_SUPPORT_MODE_3D){
        mIs3DModeOn = true;
    }
    /* TODO: Will remove this command line interface at end */
    property_get("persist.camera.hal.3dmode", value, "0");
    int mode = atoi(value);
    if( mode == 1) {
        mIs3DModeOn = true;
        HAL_currentCameraMode = CAMERA_MODE_3D;
    }

    // Camera bring-up happens asynchronously; see openCamera() above.
    if( (pthread_create(&mDeviceOpenThread, NULL, openCamera, NULL)) != 0) {
        ALOGE(" openCamera thread creation failed ");
    }
    memset(&mDimension, 0, sizeof(mDimension));
    memset(&mCrop, 0, sizeof(mCrop));
    memset(&zoomCropInfo, 0, sizeof(android_native_rect_t));
    //storeTargetType();
    property_get("persist.debug.sf.showfps", value, "0");
    mDebugFps = atoi(value);
    // Size the preview/record buffer pools per target.
    // NOTE(review): on targets other than 7630/8660/QSD8250 neither
    // kRecordBufferCount nor recordframes is assigned here — confirm they
    // are set elsewhere before recording starts.
    if( mCurrentTarget == TARGET_MSM7630 || mCurrentTarget == TARGET_MSM8660 ) {
        kPreviewBufferCountActual = kPreviewBufferCount;
        kRecordBufferCount = RECORD_BUFFERS;
        recordframes = new msm_frame[kRecordBufferCount];
        record_buffers_tracking_flag = new bool[kRecordBufferCount];
    }
    else {
        // Extra buffers for MDP zoom; see the NUM_MORE_BUFS comment above.
        kPreviewBufferCountActual = kPreviewBufferCount + NUM_MORE_BUFS;
        if( mCurrentTarget == TARGET_QSD8250 ) {
            kRecordBufferCount = RECORD_BUFFERS_8x50;
            recordframes = new msm_frame[kRecordBufferCount];
            record_buffers_tracking_flag = new bool[kRecordBufferCount];
        }
    }
    mTotalPreviewBufferCount = kTotalPreviewBufferCount;
    // Metadata buffers track preview buffers on older targets, record
    // buffers on 7630/8250/8660.
    if((mCurrentTarget != TARGET_MSM7630 ) &&  (mCurrentTarget != TARGET_QSD8250)
      && (mCurrentTarget != TARGET_MSM8660)) {
        for (int i = 0; i < mTotalPreviewBufferCount; i++)
            metadata_memory[i] = NULL;
    }
    else {
        for (int i = 0; i < kRecordBufferCount; i++)
            metadata_memory[i] = NULL;
    }
    switch(mCurrentTarget){
        case TARGET_MSM7627:
        case TARGET_MSM7627A:
            jpegPadding = 0; // to be checked.
            break;
        case TARGET_QSD8250:
        case TARGET_MSM7630:
        case TARGET_MSM8660:
            jpegPadding = 0;
            break;
        default:
            jpegPadding = 0;
            break;
    }
    // Initialize with default format values. The format values can be
    // overriden when application requests.
    mDimension.prev_format = CAMERA_YUV_420_NV21;
    mPreviewFormat = CAMERA_YUV_420_NV21;
    mDimension.enc_format = CAMERA_YUV_420_NV21;
    if((mCurrentTarget == TARGET_MSM7630) || (mCurrentTarget == TARGET_MSM8660))
        mDimension.enc_format = CAMERA_YUV_420_NV12;
    mDimension.main_img_format = CAMERA_YUV_420_NV21;
    mDimension.thumb_format = CAMERA_YUV_420_NV21;

    if( (mCurrentTarget == TARGET_MSM7630) || (mCurrentTarget == TARGET_MSM8660) ){
        /* DIS is disabled all the time in VPE support targets.
         * No provision for the user to control this.
         */
        mDisEnabled = 0;
        /* Get the DIS value from properties, to check whether
         * DIS is disabled or not. If the property is not found
         * default to DIS disabled.*/
        property_get("persist.camera.hal.dis", value, "0");
        mDisEnabled = atoi(value);
        mVpeEnabled = 1;
    }
    // 3D capture cannot use digital image stabilization.
    if(mIs3DModeOn) {
        mDisEnabled = 0;
    }
    ALOGV("constructor EX");
}
+
+void QualcommCameraHardware::hasAutoFocusSupport(){
+ if( !mCamOps.mm_camera_is_supported(CAMERA_OPS_FOCUS)){
+ ALOGI("AutoFocus is not supported");
+ mHasAutoFocusSupport = false;
+ }else {
+ mHasAutoFocusSupport = true;
+ }
+ if(mZslEnable)
+ mHasAutoFocusSupport = false;
+}
+
+//filter Picture sizes based on max width and height
+void QualcommCameraHardware::filterPictureSizes(){
+ unsigned int i;
+ if(PICTURE_SIZE_COUNT <= 0)
+ return;
+ maxSnapshotWidth = picture_sizes[0].width;
+ maxSnapshotHeight = picture_sizes[0].height;
+ // Iterate through all the width and height to find the max value
+ for(i =0; i<PICTURE_SIZE_COUNT;i++){
+ if(((maxSnapshotWidth < picture_sizes[i].width) &&
+ (maxSnapshotHeight <= picture_sizes[i].height))){
+ maxSnapshotWidth = picture_sizes[i].width;
+ maxSnapshotHeight = picture_sizes[i].height;
+ }
+ }
+ if(mZslEnable){
+ // due to lack of PMEM we restrict to lower resolution
+ picture_sizes_ptr = zsl_picture_sizes;
+ supportedPictureSizesCount = 7;
+ }
+ else if(mIs3DModeOn){
+ // In 3D mode we only want 1080p picture size
+ picture_sizes_ptr = for_3D_picture_sizes;
+ supportedPictureSizesCount = 1;
+ }
+ else{
+ picture_sizes_ptr = picture_sizes;
+ supportedPictureSizesCount = PICTURE_SIZE_COUNT;
+ }
+}
+
+bool QualcommCameraHardware::supportsSceneDetection() {
+ unsigned int prop = 0;
+ for(prop=0; prop<sizeof(boardProperties)/sizeof(board_property); prop++) {
+ if((mCurrentTarget == boardProperties[prop].target)
+ && boardProperties[prop].hasSceneDetect == true) {
+ return true;
+ break;
+ }
+ }
+ return false;
+}
+
+bool QualcommCameraHardware::supportsSelectableZoneAf() {
+ unsigned int prop = 0;
+ for(prop=0; prop<sizeof(boardProperties)/sizeof(board_property); prop++) {
+ if((mCurrentTarget == boardProperties[prop].target)
+ && boardProperties[prop].hasSelectableZoneAf == true) {
+ return true;
+ break;
+ }
+ }
+ return false;
+}
+
+bool QualcommCameraHardware::supportsFaceDetection() {
+ unsigned int prop = 0;
+ for(prop=0; prop<sizeof(boardProperties)/sizeof(board_property); prop++) {
+ if((mCurrentTarget == boardProperties[prop].target)
+ && boardProperties[prop].hasFaceDetect == true) {
+ return true;
+ break;
+ }
+ }
+ return false;
+}
+
+void QualcommCameraHardware::initDefaultParameters()
+{
+ ALOGV("initDefaultParameters E");
+ mDimension.picture_width = DEFAULT_PICTURE_WIDTH;
+ mDimension.picture_height = DEFAULT_PICTURE_HEIGHT;
+ mDimension.ui_thumbnail_width =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+ mDimension.ui_thumbnail_height =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+ bool ret = native_set_parms(CAMERA_PARM_DIMENSION,
+ sizeof(cam_ctrl_dimension_t),(void *) &mDimension);
+ if(ret != true) {
+ ALOGE("CAMERA_PARM_DIMENSION failed!!!");
+ return;
+ }
+ hasAutoFocusSupport();
+ //Disable DIS for Web Camera
+ if( !mCfgControl.mm_camera_is_supported(CAMERA_PARM_VIDEO_DIS)){
+ ALOGI("DISABLE DIS");
+ mDisEnabled = 0;
+ }else {
+ ALOGI("Enable DIS");
+ }
+ // Initialize constant parameter strings. This will happen only once in the
+ // lifetime of the mediaserver process.
+ if (!parameter_string_initialized) {
+ if(mIs3DModeOn){
+ antibanding_values = create_values_str(
+ antibanding_3D, sizeof(antibanding_3D) / sizeof(str_map));
+ } else{
+ antibanding_values = create_values_str(
+ antibanding, sizeof(antibanding) / sizeof(str_map));
+ }
+ effect_values = create_values_str(
+ effects, sizeof(effects) / sizeof(str_map));
+ autoexposure_values = create_values_str(
+ autoexposure, sizeof(autoexposure) / sizeof(str_map));
+ whitebalance_values = create_values_str(
+ whitebalance, sizeof(whitebalance) / sizeof(str_map));
+ //filter picture sizes
+ filterPictureSizes();
+ picture_size_values = create_sizes_str(
+ picture_sizes_ptr, supportedPictureSizesCount);
+ preview_size_values = create_sizes_str(
+ preview_sizes, PREVIEW_SIZE_COUNT);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
+ preview_size_values.string());
+
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_VIDEO_SIZES,
+ preview_size_values.string());
+
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ picture_size_values.string());
+ mParameters.set(QCameraParameters::KEY_VIDEO_SNAPSHOT_SUPPORTED,
+ "true");
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+ QCameraParameters::FOCUS_MODE_INFINITY);
+ mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+ QCameraParameters::FOCUS_MODE_INFINITY);
+ mParameters.set(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "1");
+
+ mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, FOCUS_AREA_INIT);
+ mParameters.set(QCameraParameters::KEY_METERING_AREAS, FOCUS_AREA_INIT);
+
+ if(!mIs3DModeOn){
+ hfr_size_values = create_sizes_str(
+ hfr_sizes, HFR_SIZE_COUNT);
+ }
+ fps_ranges_supported_values = create_fps_str(
+ FpsRangesSupported,FPS_RANGES_SUPPORTED_COUNT );
+ mParameters.set(
+ QCameraParameters::KEY_SUPPORTED_PREVIEW_FPS_RANGE,
+ fps_ranges_supported_values);
+ mParameters.setPreviewFpsRange(MINIMUM_FPS*1000,MAXIMUM_FPS*1000);
+
+ flash_values = create_values_str(
+ flash, sizeof(flash) / sizeof(str_map));
+ if(mHasAutoFocusSupport){
+ focus_mode_values = create_values_str(
+ focus_modes, sizeof(focus_modes) / sizeof(str_map));
+ }
+ if(mIs3DModeOn){
+ iso_values = create_values_str(
+ iso_3D,sizeof(iso_3D)/sizeof(str_map));
+ } else{
+ iso_values = create_values_str(
+ iso,sizeof(iso)/sizeof(str_map));
+ }
+ lensshade_values = create_values_str(
+ lensshade,sizeof(lensshade)/sizeof(str_map));
+ mce_values = create_values_str(
+ mce,sizeof(mce)/sizeof(str_map));
+ if(!mIs3DModeOn){
+ hfr_values = create_values_str(
+ hfr,sizeof(hfr)/sizeof(str_map));
+ }
+ if(mCurrentTarget == TARGET_MSM8660)
+ hdr_values = create_values_str(
+ hdr,sizeof(hdr)/sizeof(str_map));
+ //Currently Enabling Histogram for 8x60
+ if(mCurrentTarget == TARGET_MSM8660) {
+ histogram_values = create_values_str(
+ histogram,sizeof(histogram)/sizeof(str_map));
+ }
+ //Currently Enabling Skin Tone Enhancement for 8x60 and 7630
+ if((mCurrentTarget == TARGET_MSM8660)||(mCurrentTarget == TARGET_MSM7630)) {
+ skinToneEnhancement_values = create_values_str(
+ skinToneEnhancement,sizeof(skinToneEnhancement)/sizeof(str_map));
+ }
+ if(mHasAutoFocusSupport){
+ touchafaec_values = create_values_str(
+ touchafaec,sizeof(touchafaec)/sizeof(str_map));
+ }
+ zsl_values = create_values_str(
+ zsl_modes,sizeof(zsl_modes)/sizeof(str_map));
+
+ if(mZslEnable){
+ picture_format_values = create_values_str(
+ picture_formats_zsl, sizeof(picture_formats_zsl)/sizeof(str_map));
+ } else{
+ picture_format_values = create_values_str(
+ picture_formats, sizeof(picture_formats)/sizeof(str_map));
+ }
+ if(mCurrentTarget == TARGET_MSM8660 ||
+ (mCurrentTarget == TARGET_MSM7625A ||
+ mCurrentTarget == TARGET_MSM7627A)) {
+ denoise_values = create_values_str(
+ denoise, sizeof(denoise) / sizeof(str_map));
+ }
+ if(mCfgControl.mm_camera_query_parms(CAMERA_PARM_ZOOM_RATIO,
+ (void **)&zoomRatios, (uint32_t *) &mMaxZoom) == MM_CAMERA_SUCCESS) {
+ zoomSupported = true;
+ if( mMaxZoom >0) {
+ ALOGI("Maximum zoom value is %d", mMaxZoom);
+ if(zoomRatios != NULL) {
+ zoom_ratio_values = create_str(zoomRatios, mMaxZoom);
+ } else {
+ ALOGE("Failed to get zoomratios ..");
+ }
+ } else {
+ zoomSupported = false;
+ }
+ } else {
+ zoomSupported = false;
+ ALOGE("Failed to get maximum zoom value...setting max "
+ "zoom to zero");
+ mMaxZoom = 0;
+ }
+ preview_frame_rate_values = create_values_range_str(
+ MINIMUM_FPS, MAXIMUM_FPS);
+
+ scenemode_values = create_values_str(
+ scenemode, sizeof(scenemode) / sizeof(str_map));
+
+ if(supportsSceneDetection()) {
+ scenedetect_values = create_values_str(
+ scenedetect, sizeof(scenedetect) / sizeof(str_map));
+ }
+
+ if(mHasAutoFocusSupport && supportsSelectableZoneAf()){
+ selectable_zone_af_values = create_values_str(
+ selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map));
+ }
+
+ if(mHasAutoFocusSupport && supportsFaceDetection()) {
+ facedetection_values = create_values_str(
+ facedetection, sizeof(facedetection) / sizeof(str_map));
+ }
+
+ redeye_reduction_values = create_values_str(
+ redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map));
+
+ parameter_string_initialized = true;
+ }
+ //set video size
+ if(( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+ String8 vSize = create_sizes_str(preview_sizes, 1);
+ mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, vSize.string());
+ }
+ if(mIs3DModeOn){
+ ALOGI("In initDefaultParameters - 3D mode on so set the default preview to 1280 x 720");
+ mParameters.setPreviewSize(DEFAULT_PREVIEW_WIDTH_3D, DEFAULT_PREVIEW_HEIGHT_3D);
+ mDimension.display_width = DEFAULT_PREVIEW_WIDTH_3D;
+ mDimension.display_height = DEFAULT_PREVIEW_HEIGHT_3D;
+ } else{
+ mParameters.setPreviewSize(DEFAULT_PREVIEW_WIDTH, DEFAULT_PREVIEW_HEIGHT);
+ mDimension.display_width = DEFAULT_PREVIEW_WIDTH;
+ mDimension.display_height = DEFAULT_PREVIEW_HEIGHT;
+ }
+ mParameters.setPreviewFrameRate(DEFAULT_FPS);
+ if( mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS)){
+ mParameters.set(
+ QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ preview_frame_rate_values.string());
+ } else {
+ mParameters.setPreviewFrameRate(DEFAULT_FIXED_FPS_VALUE);
+ mParameters.set(
+ QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES,
+ DEFAULT_FIXED_FPS_VALUE);
+ }
+ mParameters.setPreviewFrameRateMode("frame-rate-auto");
+ mParameters.setPreviewFormat("yuv420sp"); // informative
+ mParameters.set("overlay-format", HAL_PIXEL_FORMAT_YCbCr_420_SP);
+ if(mIs3DModeOn){
+ mParameters.setPictureSize(DEFAULT_PICTURE_WIDTH_3D, DEFAULT_PICTURE_HEIGHT_3D);
+ } else{
+ mParameters.setPictureSize(DEFAULT_PICTURE_WIDTH, DEFAULT_PICTURE_HEIGHT);
+ }
+ mParameters.setPictureFormat("jpeg"); // informative
+
+ mParameters.set(QCameraParameters::KEY_VIDEO_FRAME_FORMAT, "yuv420sp");
+
+ mParameters.set(QCameraParameters::KEY_JPEG_QUALITY, "85"); // max quality
+
+ mParameters.set("power-mode-supported", "false");
+
+ mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH,
+ THUMBNAIL_WIDTH_STR); // informative
+ mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT,
+ THUMBNAIL_HEIGHT_STR); // informative
+ mDimension.ui_thumbnail_width =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+ mDimension.ui_thumbnail_height =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+ mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, "90");
+
+ String8 valuesStr = create_sizes_str(jpeg_thumbnail_sizes, JPEG_THUMBNAIL_SIZE_COUNT);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_JPEG_THUMBNAIL_SIZES,
+ valuesStr.string());
+
+ // Define CAMERA_SMOOTH_ZOOM in Android.mk file , to enable smoothzoom
+#ifdef CAMERA_SMOOTH_ZOOM
+ mParameters.set(QCameraParameters::KEY_SMOOTH_ZOOM_SUPPORTED, "true");
+#endif
+
+ if(zoomSupported){
+ mParameters.set(QCameraParameters::KEY_ZOOM_SUPPORTED, "true");
+ ALOGI("max zoom is %d", mMaxZoom-1);
+ /* mMaxZoom value that the query interface returns is the size
+ * of zoom table. So the actual max zoom value will be one
+ * less than that value.
+ */
+ mParameters.set("max-zoom",mMaxZoom-1);
+ mParameters.set(QCameraParameters::KEY_ZOOM_RATIOS,
+ zoom_ratio_values);
+ } else {
+ mParameters.set(QCameraParameters::KEY_ZOOM_SUPPORTED, "false");
+ }
+ /* Enable zoom support for video application if VPE enabled */
+ if(zoomSupported && mVpeEnabled) {
+ mParameters.set("video-zoom-support", "true");
+ } else {
+ mParameters.set("video-zoom-support", "false");
+ }
+
+ mParameters.set(QCameraParameters::KEY_CAMERA_MODE,0);
+
+ mParameters.set(QCameraParameters::KEY_ANTIBANDING,
+ QCameraParameters::ANTIBANDING_OFF);
+ mParameters.set(QCameraParameters::KEY_EFFECT,
+ QCameraParameters::EFFECT_NONE);
+ mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE,
+ QCameraParameters::AUTO_EXPOSURE_FRAME_AVG);
+ mParameters.set(QCameraParameters::KEY_WHITE_BALANCE,
+ QCameraParameters::WHITE_BALANCE_AUTO);
+ if( (mCurrentTarget != TARGET_MSM7630)
+ && (mCurrentTarget != TARGET_QSD8250)
+ && (mCurrentTarget != TARGET_MSM8660)
+ && (mCurrentTarget != TARGET_MSM7627A)) {
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+ "yuv420sp");
+ } else if(mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627) {
+ preview_format_values = create_values_str(
+ preview_formats1, sizeof(preview_formats1) / sizeof(str_map));
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+ preview_format_values.string());
+ } else {
+ preview_format_values = create_values_str(
+ preview_formats, sizeof(preview_formats) / sizeof(str_map));
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FORMATS,
+ preview_format_values.string());
+ }
+
+ frame_rate_mode_values = create_values_str(
+ frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map));
+ if( mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS_MODE)){
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATE_MODES,
+ frame_rate_mode_values.string());
+ }
+
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PREVIEW_SIZES,
+ preview_size_values.string());
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_SIZES,
+ picture_size_values.string());
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_ANTIBANDING,
+ antibanding_values);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_EFFECTS, effect_values);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_AUTO_EXPOSURE, autoexposure_values);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_WHITE_BALANCE,
+ whitebalance_values);
+
+ if(mHasAutoFocusSupport){
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+ focus_mode_values);
+ mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+ QCameraParameters::FOCUS_MODE_AUTO);
+ } else {
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+ QCameraParameters::FOCUS_MODE_INFINITY);
+ mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+ QCameraParameters::FOCUS_MODE_INFINITY);
+ }
+
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_PICTURE_FORMATS,
+ picture_format_values);
+
+ if(mCfgControl.mm_camera_is_supported(CAMERA_PARM_LED_MODE)) {
+ mParameters.set(QCameraParameters::KEY_FLASH_MODE,
+ QCameraParameters::FLASH_MODE_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FLASH_MODES,
+ flash_values);
+ }
+
+ mParameters.set(QCameraParameters::KEY_MAX_SHARPNESS,
+ CAMERA_MAX_SHARPNESS);
+ mParameters.set(QCameraParameters::KEY_MAX_CONTRAST,
+ CAMERA_MAX_CONTRAST);
+ mParameters.set(QCameraParameters::KEY_MAX_SATURATION,
+ CAMERA_MAX_SATURATION);
+
+ mParameters.set(
+ QCameraParameters::KEY_MAX_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR);
+ mParameters.set(
+ QCameraParameters::KEY_MIN_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR);
+ mParameters.set(
+ QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+ EXPOSURE_COMPENSATION_DEFAULT_NUMERATOR);
+ mParameters.setFloat(
+ QCameraParameters::KEY_EXPOSURE_COMPENSATION_STEP,
+ EXPOSURE_COMPENSATION_STEP);
+
+ mParameters.set("luma-adaptation", "3");
+ mParameters.set("skinToneEnhancement", "0");
+ mParameters.set("zoom-supported", "true");
+ mParameters.set("zoom", 0);
+ mParameters.set(QCameraParameters::KEY_PICTURE_FORMAT,
+ QCameraParameters::PIXEL_FORMAT_JPEG);
+
+ mParameters.set(QCameraParameters::KEY_SHARPNESS,
+ CAMERA_DEF_SHARPNESS);
+ mParameters.set(QCameraParameters::KEY_CONTRAST,
+ CAMERA_DEF_CONTRAST);
+ mParameters.set(QCameraParameters::KEY_SATURATION,
+ CAMERA_DEF_SATURATION);
+
+ mParameters.set(QCameraParameters::KEY_ISO_MODE,
+ QCameraParameters::ISO_AUTO);
+ mParameters.set(QCameraParameters::KEY_LENSSHADE,
+ QCameraParameters::LENSSHADE_ENABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_ISO_MODES,
+ iso_values);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_LENSSHADE_MODES,
+ lensshade_values);
+ mParameters.set(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT,
+ QCameraParameters::MCE_ENABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_MEM_COLOR_ENHANCE_MODES,
+ mce_values);
+ if(mCfgControl.mm_camera_is_supported(CAMERA_PARM_HFR) && !(mIs3DModeOn)) {
+ mParameters.set(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE,
+ QCameraParameters::VIDEO_HFR_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_HFR_SIZES,
+ hfr_size_values.string());
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_VIDEO_HIGH_FRAME_RATE_MODES,
+ hfr_values);
+ } else
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_HFR_SIZES,"");
+
+ mParameters.set(QCameraParameters::KEY_HIGH_DYNAMIC_RANGE_IMAGING,
+ QCameraParameters::MCE_DISABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_HDR_IMAGING_MODES,
+ hdr_values);
+ mParameters.set(QCameraParameters::KEY_HISTOGRAM,
+ QCameraParameters::HISTOGRAM_DISABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_HISTOGRAM_MODES,
+ histogram_values);
+ mParameters.set(QCameraParameters::KEY_SKIN_TONE_ENHANCEMENT,
+ QCameraParameters::SKIN_TONE_ENHANCEMENT_DISABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_SKIN_TONE_ENHANCEMENT_MODES,
+ skinToneEnhancement_values);
+ mParameters.set(QCameraParameters::KEY_SCENE_MODE,
+ QCameraParameters::SCENE_MODE_AUTO);
+ mParameters.set("strtextures", "OFF");
+
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_SCENE_MODES,
+ scenemode_values);
+ mParameters.set(QCameraParameters::KEY_DENOISE,
+ QCameraParameters::DENOISE_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_DENOISE,
+ denoise_values);
+
+ //touch af/aec parameters
+ mParameters.set(QCameraParameters::KEY_TOUCH_AF_AEC,
+ QCameraParameters::TOUCH_AF_AEC_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_TOUCH_AF_AEC,
+ touchafaec_values);
+ mParameters.set("touchAfAec-dx","100");
+ mParameters.set("touchAfAec-dy","100");
+ mParameters.set(QCameraParameters::KEY_MAX_NUM_FOCUS_AREAS, "1");
+ mParameters.set(QCameraParameters::KEY_MAX_NUM_METERING_AREAS, "1");
+
+ mParameters.set(QCameraParameters::KEY_SCENE_DETECT,
+ QCameraParameters::SCENE_DETECT_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_SCENE_DETECT,
+ scenedetect_values);
+ mParameters.set(QCameraParameters::KEY_SELECTABLE_ZONE_AF,
+ QCameraParameters::SELECTABLE_ZONE_AF_AUTO);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_SELECTABLE_ZONE_AF,
+ selectable_zone_af_values);
+ mParameters.set(QCameraParameters::KEY_FACE_DETECTION,
+ QCameraParameters::FACE_DETECTION_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_FACE_DETECTION,
+ facedetection_values);
+ mParameters.set(QCameraParameters::KEY_REDEYE_REDUCTION,
+ QCameraParameters::REDEYE_REDUCTION_DISABLE);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_REDEYE_REDUCTION,
+ redeye_reduction_values);
+ mParameters.set(QCameraParameters::KEY_ZSL,
+ QCameraParameters::ZSL_OFF);
+ mParameters.set(QCameraParameters::KEY_SUPPORTED_ZSL_MODES,
+ zsl_values);
+
+ float focalLength = 0.0f;
+ float horizontalViewAngle = 0.0f;
+ float verticalViewAngle = 0.0f;
+
+ mCfgControl.mm_camera_get_parm(CAMERA_PARM_FOCAL_LENGTH,
+ (void *)&focalLength);
+ mParameters.setFloat(QCameraParameters::KEY_FOCAL_LENGTH,
+ focalLength);
+ mCfgControl.mm_camera_get_parm(CAMERA_PARM_HORIZONTAL_VIEW_ANGLE,
+ (void *)&horizontalViewAngle);
+ mParameters.setFloat(QCameraParameters::KEY_HORIZONTAL_VIEW_ANGLE,
+ horizontalViewAngle);
+ mCfgControl.mm_camera_get_parm(CAMERA_PARM_VERTICAL_VIEW_ANGLE,
+ (void *)&verticalViewAngle);
+ mParameters.setFloat(QCameraParameters::KEY_VERTICAL_VIEW_ANGLE,
+ verticalViewAngle);
+ numCapture = 1;
+ if(mZslEnable) {
+ int maxSnapshot = MAX_SNAPSHOT_BUFFERS - 2;
+ char value[5];
+ property_get("persist.camera.hal.capture", value, "1");
+ numCapture = atoi(value);
+ if(numCapture > maxSnapshot)
+ numCapture = maxSnapshot;
+ else if(numCapture < 1)
+ numCapture = 1;
+ mParameters.set("capture-burst-captures-values", maxSnapshot);
+ mParameters.set("capture-burst-interval-supported", "false");
+ }
+ mParameters.set("num-snaps-per-shutter", numCapture);
+ ALOGI("%s: setting num-snaps-per-shutter to %d", __FUNCTION__, numCapture);
+ if(mIs3DModeOn)
+ mParameters.set("3d-frame-format", "left-right");
+
+ switch(mCurrentTarget){
+ case TARGET_MSM7627:
+ case TARGET_QSD8250:
+ case TARGET_MSM7630:
+ mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, "800x480");
+ break;
+ case TARGET_MSM7627A:
+ mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, "864x480");
+ break;
+ case TARGET_MSM8660:
+ mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, "1920x1088");
+ break;
+ default:
+ mParameters.set(QCameraParameters::KEY_PREFERRED_PREVIEW_SIZE_FOR_VIDEO, "640x480");
+ break;
+ }
+ if (setParameters(mParameters) != NO_ERROR) {
+ ALOGE("Failed to set default parameters?!");
+ }
+
+ /* Initialize the camframe_timeout_flag*/
+ Mutex::Autolock l(&mCamframeTimeoutLock);
+ camframe_timeout_flag = FALSE;
+ mPostviewHeap = NULL;
+ mDisplayHeap = NULL;
+ mLastPreviewFrameHeap = NULL;
+ mThumbnailHeap = NULL;
+
+ mInitialized = true;
+ strTexturesOn = false;
+
+ ALOGV("initDefaultParameters X");
+}
+
+
+/* Round x up to the next 4 KiB boundary (assumes 4096-byte pages). */
+#define ROUND_TO_PAGE(x) (((x)+0xfff)&~0xfff)
+
+/* Bring the camera up after open: resolve liboemcamera entry points (when
+ * DLOPEN_LIBMMCAMERA is set), wire the HAL callbacks into mCamNotify, wait
+ * for the asynchronous device-open thread, then cache the sensor's supported
+ * snapshot / preview / HFR size tables.
+ * Returns false on any unrecoverable setup failure. */
+bool QualcommCameraHardware::startCamera()
+{
+    ALOGV("startCamera E");
+    /* mCurrentTarget is derived elsewhere from the board id; without it the
+     * target-specific paths below cannot be selected. */
+    if( mCurrentTarget == TARGET_MAX ) {
+        ALOGE(" Unable to determine the target type. Camera will not work ");
+        return false;
+    }
+#if DLOPEN_LIBMMCAMERA
+
+    /* libmmcamera ("liboemcamera") is expected to have been dlopen()ed
+     * earlier; here only the required symbols are resolved.
+     * NOTE(review): individual dlsym() results are not NULL-checked, so a
+     * missing symbol will only surface as a crash at first use. */
+    ALOGV("loading liboemcamera at %p", libmmcamera);
+    if (!libmmcamera) {
+        ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+        return false;
+    }
+
+    /* Frame-thread entry points. */
+    *(void **)&LINK_cam_frame =
+        ::dlsym(libmmcamera, "cam_frame");
+    *(void **)&LINK_wait_cam_frame_thread_ready =
+        ::dlsym(libmmcamera, "wait_cam_frame_thread_ready");
+    *(void **)&LINK_cam_frame_set_exit_flag =
+        ::dlsym(libmmcamera, "cam_frame_set_exit_flag");
+    *(void **)&LINK_camframe_terminate =
+        ::dlsym(libmmcamera, "camframe_terminate");
+
+    /* JPEG encoder entry points. */
+    *(void **)&LINK_jpeg_encoder_init =
+        ::dlsym(libmmcamera, "jpeg_encoder_init");
+
+    *(void **)&LINK_jpeg_encoder_encode =
+        ::dlsym(libmmcamera, "jpeg_encoder_encode");
+
+    *(void **)&LINK_jpeg_encoder_join =
+        ::dlsym(libmmcamera, "jpeg_encoder_join");
+
+    /* Callbacks mm-camera invokes back into this HAL. */
+    mCamNotify.preview_frame_cb = &receive_camframe_callback;
+
+    mCamNotify.camstats_cb = &receive_camstats_callback;
+
+    mCamNotify.on_event = &receive_event_callback;
+
+    mCamNotify.on_error_event = &receive_camframe_error_callback;
+
+    // 720 p new recording functions
+    mCamNotify.video_frame_cb = &receive_camframe_video_callback;
+    // 720 p new recording functions
+
+    *(void **)&LINK_camframe_add_frame = ::dlsym(libmmcamera, "camframe_add_frame");
+
+    *(void **)&LINK_camframe_release_all_frames = ::dlsym(libmmcamera, "camframe_release_all_frames");
+
+    *(void **)&LINK_mmcamera_shutter_callback =
+        ::dlsym(libmmcamera, "mmcamera_shutter_callback");
+
+    *LINK_mmcamera_shutter_callback = receive_shutter_callback;
+
+    *(void**)&LINK_jpeg_encoder_setMainImageQuality =
+        ::dlsym(libmmcamera, "jpeg_encoder_setMainImageQuality");
+
+    *(void**)&LINK_jpeg_encoder_setThumbnailQuality =
+        ::dlsym(libmmcamera, "jpeg_encoder_setThumbnailQuality");
+
+    *(void**)&LINK_jpeg_encoder_setRotation =
+        ::dlsym(libmmcamera, "jpeg_encoder_setRotation");
+
+    *(void**)&LINK_jpeg_encoder_get_buffer_offset =
+        ::dlsym(libmmcamera, "jpeg_encoder_get_buffer_offset");
+
+    *(void**)&LINK_jpeg_encoder_set_3D_info =
+        ::dlsym(libmmcamera, "jpeg_encoder_set_3D_info");
+
+/* Disabling until support is available.
+    *(void**)&LINK_jpeg_encoder_setLocation =
+        ::dlsym(libmmcamera, "jpeg_encoder_setLocation");
+*/
+    *(void **)&LINK_cam_conf =
+        ::dlsym(libmmcamera, "cam_conf");
+
+/* Disabling until support is available.
+    *(void **)&LINK_default_sensor_get_snapshot_sizes =
+        ::dlsym(libmmcamera, "default_sensor_get_snapshot_sizes");
+*/
+    *(void **)&LINK_launch_cam_conf_thread =
+        ::dlsym(libmmcamera, "launch_cam_conf_thread");
+
+    *(void **)&LINK_release_cam_conf_thread =
+        ::dlsym(libmmcamera, "release_cam_conf_thread");
+
+    mCamNotify.on_liveshot_event = &receive_liveshot_callback;
+
+    *(void **)&LINK_cancel_liveshot =
+        ::dlsym(libmmcamera, "cancel_liveshot");
+
+    *(void **)&LINK_set_liveshot_params =
+        ::dlsym(libmmcamera, "set_liveshot_params");
+
+    *(void **)&LINK_set_liveshot_frame =
+        ::dlsym(libmmcamera, "set_liveshot_frame");
+
+    *(void **)&LINK_mm_camera_destroy =
+        ::dlsym(libmmcamera, "mm_camera_destroy");
+
+    /* NOTE(review): the "_inplace" pointer is bound to the plain symbol and
+     * the plain pointer to the "_ver2" symbol — looks like a deliberate
+     * cross-mapping, but worth confirming against the library's exports. */
+    *(void **)&LINK_yuv_convert_ycrcb420sp_to_yv12_inplace =
+        ::dlsym(libmmcamera, "yuv_convert_ycrcb420sp_to_yv12");
+
+    *(void **)&LINK_yuv_convert_ycrcb420sp_to_yv12 =
+        ::dlsym(libmmcamera, "yuv_convert_ycrcb420sp_to_yv12_ver2");
+
+    /* Disabling until support is available.*/
+    *(void **)&LINK_zoom_crop_upscale =
+        ::dlsym(libmmcamera, "zoom_crop_upscale");
+
+
+#else
+    /* Statically linked build: callbacks are assigned directly. */
+    mCamNotify.preview_frame_cb = &receive_camframe_callback;
+    mCamNotify.camstats_cb = &receive_camstats_callback;
+    mCamNotify.on_event = &receive_event_callback;
+
+    mmcamera_shutter_callback = receive_shutter_callback;
+    mCamNotify.on_liveshot_event = &receive_liveshot_callback;
+    mCamNotify.video_frame_cb = &receive_camframe_video_callback;
+
+#endif // DLOPEN_LIBMMCAMERA
+#if 0 //commenting this for now as not getting graphics permission
+    if((mCurrentTarget != TARGET_MSM7630) && (mCurrentTarget != TARGET_MSM8660)){
+        fb_fd = open("/dev/graphics/fb0", O_RDWR);
+        if (fb_fd < 0) {
+            ALOGE("startCamera: fb0 open failed: %s!", strerror(errno));
+            return FALSE;
+        }
+    }
+#endif
+    /* Wait for the asynchronous device-open thread before querying the
+     * sensor's capabilities below. */
+    int ret_val;
+    if (pthread_join(mDeviceOpenThread, (void**)&ret_val) != 0) {
+        ALOGE("openCamera thread exit failed");
+        return false;
+    }
+
+    if (!mCameraOpen) {
+        ALOGE("openCamera() failed");
+        return false;
+    }
+
+
+    /* Query the supported size tables; the config layer fills in the
+     * array pointers and counts. */
+    mCfgControl.mm_camera_query_parms(CAMERA_PARM_PICT_SIZE, (void **)&picture_sizes, &PICTURE_SIZE_COUNT);
+    if ((picture_sizes == NULL) || (!PICTURE_SIZE_COUNT)) {
+        ALOGE("startCamera X: could not get snapshot sizes");
+        return false;
+    }
+    ALOGI("startCamera picture_sizes %p PICTURE_SIZE_COUNT %d", picture_sizes, PICTURE_SIZE_COUNT);
+    mCfgControl.mm_camera_query_parms(CAMERA_PARM_PREVIEW_SIZE, (void **)&preview_sizes, &PREVIEW_SIZE_COUNT);
+    if ((preview_sizes == NULL) || (!PREVIEW_SIZE_COUNT)) {
+        ALOGE("startCamera X: could not get preview sizes");
+        return false;
+    }
+    ALOGI("startCamera preview_sizes %p previewSizeCount %d", preview_sizes, PREVIEW_SIZE_COUNT);
+
+    mCfgControl.mm_camera_query_parms(CAMERA_PARM_HFR_SIZE, (void **)&hfr_sizes, &HFR_SIZE_COUNT);
+    if ((hfr_sizes == NULL) || (!HFR_SIZE_COUNT)) {
+        ALOGE("startCamera X: could not get hfr sizes");
+        return false;
+    }
+    ALOGI("startCamera hfr_sizes %p hfrSizeCount %d", hfr_sizes, HFR_SIZE_COUNT);
+
+
+    ALOGV("startCamera X");
+    return true;
+}
+
+/* IBinder dump hook.  The entire diagnostic body below is compiled out
+ * (#if 0), so this currently reports NO_ERROR without writing anything
+ * to fd; the dead code is kept as a template for re-enabling. */
+status_t QualcommCameraHardware::dump(int fd,
+                                      const Vector<String16>& args) const
+{
+    const size_t SIZE = 256;
+    char buffer[SIZE];
+    String8 result;
+#if 0
+    // Dump internal primitives.
+    result.append("QualcommCameraHardware::dump");
+    snprintf(buffer, 255, "mMsgEnabled (%d)\n", mMsgEnabled);
+    result.append(buffer);
+    int width, height;
+    mParameters.getPreviewSize(&width, &height);
+    snprintf(buffer, 255, "preview width(%d) x height (%d)\n", width, height);
+    result.append(buffer);
+    mParameters.getPictureSize(&width, &height);
+    snprintf(buffer, 255, "raw width(%d) x height (%d)\n", width, height);
+    result.append(buffer);
+    snprintf(buffer, 255,
+             "preview frame size(%d), raw size (%d), jpeg size (%d) "
+             "and jpeg max size (%d)\n", mPreviewFrameSize, mRawSize,
+             mJpegSize, mJpegMaxSize);
+    result.append(buffer);
+    write(fd, result.string(), result.size());
+
+    // Dump internal objects.
+    if (mPreviewHeap[0] != 0) {
+        mPreviewHeap[0]->dump(fd, args);
+    }
+    if (mRawHeap != 0) {
+        mRawHeap->dump(fd, args);
+    }
+    if (mJpegHeap != 0) {
+        mJpegHeap->dump(fd, args);
+    }
+    mParameters.dump(fd, args);
+#endif
+    return NO_ERROR;
+}
+
+/* Issue ioctl calls related to starting Camera Operations*/
+bool static native_start_ops(mm_camera_ops_type_t type, void* value)
+{
+    /* Delegate to the mm-camera ops table; anything other than
+     * MM_CAMERA_SUCCESS is logged and reported as failure. */
+    const bool ok =
+        (mCamOps.mm_camera_start(type, value, NULL) == MM_CAMERA_SUCCESS);
+    if (!ok) {
+        ALOGE("native_start_ops: type %d error %s",
+            type,strerror(errno));
+    }
+    return ok;
+}
+
+/* Issue ioctl calls related to stopping Camera Operations*/
+bool static native_stop_ops(mm_camera_ops_type_t type, void* value)
+{
+    /* Mirror of native_start_ops(): log and fail on any non-success
+     * status from the ops table. */
+    const bool ok =
+        (mCamOps.mm_camera_stop(type, value, NULL) == MM_CAMERA_SUCCESS);
+    if (!ok) {
+        ALOGE("native_stop_ops: type %d error %s",
+            type,strerror(errno));
+    }
+    return ok;
+}
+/*==========================================================================*/
+
+
+#define GPS_PROCESSING_METHOD_SIZE 101
+/* Focal length is encoded as a rational with this denominator (2 d.p.). */
+#define FOCAL_LENGTH_DECIMAL_PRECISON 100
+
+/* EXIF character-code prefix for ASCII fields: 'A','S','C','I','I',0,0,0. */
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };
+#define EXIF_ASCII_PREFIX_SIZE (sizeof(ExifAsciiPrefix))
+
+/* Static backing storage for EXIF tag payloads.  addExifTag() stores
+ * pointers to the array/ASCII values, so these must stay alive until the
+ * encoder has consumed the tag table. */
+static rat_t latitude[3];
+static rat_t longitude[3];
+static char lonref[2];
+static char latref[2];
+static rat_t altitude;
+static rat_t gpsTimestamp[3];
+static char gpsDatestamp[20];
+static char dateTime[20];
+static rat_t focalLength;
+static uint16_t flashMode;
+/* Indexed by the iso str_map lookup result; entries 0/1 presumably map to
+ * the non-numeric modes (auto/deblur) — confirm against the iso str_map. */
+static int iso_arr[] = {0,1,100,200,400,800,1600};
+static uint16_t isoMode;
+static char gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE];
+/* Append one entry to the global EXIF table handed to the JPEG encoder.
+ * Scalar payloads (single rational/byte/short) are copied into the entry;
+ * multi-element and ASCII payloads are stored by pointer, so `data` must
+ * remain valid until encoding completes.  When the table is full the tag
+ * is dropped with an error log. */
+static void addExifTag(exif_tag_id_t tagid, exif_tag_type_t type,
+                        uint32_t count, uint8_t copy, void *data) {
+
+    if(exif_table_numEntries == MAX_EXIF_TABLE_ENTRIES) {
+        ALOGE("Number of entries exceeded limit");
+        return;
+    }
+
+    const int slot = exif_table_numEntries;
+    exif_data[slot].tag_id = tagid;
+    exif_data[slot].tag_entry.type = type;
+    exif_data[slot].tag_entry.count = count;
+    exif_data[slot].tag_entry.copy = copy;
+
+    /* Select the union member by type; rationals and shorts additionally
+     * distinguish single values from arrays (count of 0 stores nothing,
+     * matching the original condition set). */
+    if(type == EXIF_RATIONAL) {
+        if(count > 1)
+            exif_data[slot].tag_entry.data._rats = (rat_t *)data;
+        else if(count == 1)
+            exif_data[slot].tag_entry.data._rat = *(rat_t *)data;
+    } else if(type == EXIF_ASCII) {
+        exif_data[slot].tag_entry.data._ascii = (char *)data;
+    } else if(type == EXIF_BYTE) {
+        exif_data[slot].tag_entry.data._byte = *(uint8_t *)data;
+    } else if(type == EXIF_SHORT) {
+        if(count > 1)
+            exif_data[slot].tag_entry.data._shorts = (uint16_t *)data;
+        else if(count == 1)
+            exif_data[slot].tag_entry.data._short = *(uint16_t *)data;
+    }
+
+    // Increase number of entries
+    exif_table_numEntries++;
+}
+
+/* Split a signed decimal-degrees string (e.g. "-37.5") into EXIF DMS
+ * parts: whole degrees, whole minutes, and seconds scaled by 1000.
+ * The sign is discarded (the hemisphere goes into the *_REF tag). */
+static void parseLatLong(const char *latlonString, int *pDegrees,
+                            int *pMinutes, int *pSeconds ) {
+
+    const double absVal = fabs(atof(latlonString));
+    const int deg = (int) absVal;
+
+    /* Fractional degrees expressed in minutes; its own fraction becomes
+     * milli-seconds-of-arc. */
+    const double minutesF = (absVal - deg) * 60;
+    const int min = (int) minutesF;
+
+    *pDegrees = deg;
+    *pMinutes = min;
+    *pSeconds = (int) ((minutesF - min) * 60 * 1000);
+}
+
+/* Convert a decimal-degrees string into the three EXIF DMS rationals and
+ * record it under the latitude or longitude GPS tag.  The payload lives in
+ * the static latitude[]/longitude[] arrays because addExifTag() keeps a
+ * pointer for multi-element rationals. */
+static void setLatLon(exif_tag_id_t tag, const char *latlonString) {
+
+    int deg = 0, min = 0, sec = 0;
+
+    parseLatLong(latlonString, &deg, &min, &sec);
+
+    /* Seconds carry a denominator of 1000 (parseLatLong scales by 1000). */
+    rat_t dms[3] = { {deg, 1},
+                     {min, 1},
+                     {sec, 1000} };
+
+    const bool isLat = (tag == EXIFTAGID_GPS_LATITUDE);
+    rat_t *store = isLat ? latitude : longitude;
+
+    memcpy(store, dms, sizeof(latitude));
+    addExifTag(isLat ? EXIFTAGID_GPS_LATITUDE : EXIFTAGID_GPS_LONGITUDE,
+               EXIF_RATIONAL, 3, 1, (void *)store);
+}
+
+/* Translate the app-supplied GPS strings held in mParameters into EXIF GPS
+ * IFD tags: processing method, latitude/longitude (plus N/S, E/W refs),
+ * altitude (plus above/below-sea-level ref) and the UTC date/time stamps.
+ * Every value is optional; a tag is emitted only when the parameter is set. */
+void QualcommCameraHardware::setGpsParameters() {
+    const char *str = NULL;
+
+    str = mParameters.get(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+
+    if(str!=NULL ){
+       /* ASCII fields require the 8-byte "ASCII\0\0\0" character-code
+        * prefix; copy the method after it and force NUL termination. */
+       memcpy(gpsProcessingMethod, ExifAsciiPrefix, EXIF_ASCII_PREFIX_SIZE);
+       strncpy(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE, str,
+           GPS_PROCESSING_METHOD_SIZE - 1);
+       gpsProcessingMethod[EXIF_ASCII_PREFIX_SIZE + GPS_PROCESSING_METHOD_SIZE-1] = '\0';
+       addExifTag(EXIFTAGID_GPS_PROCESSINGMETHOD, EXIF_ASCII,
+           EXIF_ASCII_PREFIX_SIZE + strlen(gpsProcessingMethod + EXIF_ASCII_PREFIX_SIZE) + 1,
+           1, (void *)gpsProcessingMethod);
+    }
+
+    str = NULL;
+
+    //Set Latitude
+    str = mParameters.get(QCameraParameters::KEY_GPS_LATITUDE);
+    if(str != NULL) {
+        setLatLon(EXIFTAGID_GPS_LATITUDE, str);
+        //set Latitude Ref
+        float latitudeValue = mParameters.getFloat(QCameraParameters::KEY_GPS_LATITUDE);
+        latref[0] = 'N';
+        if(latitudeValue < 0 ){
+            latref[0] = 'S';
+        }
+        latref[1] = '\0';
+        mParameters.set(QCameraParameters::KEY_GPS_LATITUDE_REF, latref);
+        addExifTag(EXIFTAGID_GPS_LATITUDE_REF, EXIF_ASCII, 2,
+                                1, (void *)latref);
+    }
+
+    //set Longitude
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_LONGITUDE);
+    if(str != NULL) {
+        setLatLon(EXIFTAGID_GPS_LONGITUDE, str);
+        //set Longitude Ref
+        float longitudeValue = mParameters.getFloat(QCameraParameters::KEY_GPS_LONGITUDE);
+        lonref[0] = 'E';
+        if(longitudeValue < 0){
+            lonref[0] = 'W';
+        }
+        lonref[1] = '\0';
+        mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE_REF, lonref);
+        addExifTag(EXIFTAGID_GPS_LONGITUDE_REF, EXIF_ASCII, 2,
+                                1, (void *)lonref);
+    }
+
+    //set Altitude
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_ALTITUDE);
+    if(str != NULL) {
+        double value = atof(str);
+        /* ref: 0 = above sea level, 1 = below (per EXIF GPSAltitudeRef);
+         * the magnitude is stored as metres with a /1000 rational. */
+        int ref = 0;
+        if(value < 0){
+            ref = 1;
+            value = -value;
+        }
+        uint32_t value_meter = value * 1000;
+        rat_t alt_value = {value_meter, 1000};
+        memcpy(&altitude, &alt_value, sizeof(altitude));
+        addExifTag(EXIFTAGID_GPS_ALTITUDE, EXIF_RATIONAL, 1,
+                    1, (void *)&altitude);
+        //set AltitudeRef
+        mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE_REF, ref);
+        addExifTag(EXIFTAGID_GPS_ALTITUDE_REF, EXIF_BYTE, 1,
+                    1, (void *)&ref);
+    }
+
+    //set Gps TimeStamp
+    str = NULL;
+    str = mParameters.get(QCameraParameters::KEY_GPS_TIMESTAMP);
+    if(str != NULL) {
+
+        /* The parameter is a Unix epoch value; split it into the EXIF
+         * GPSDateStamp string and the H/M/S rational triple, both in UTC. */
+        long value = atol(str);
+        time_t unixTime;
+        struct tm *UTCTimestamp;
+
+        unixTime = (time_t)value;
+        UTCTimestamp = gmtime(&unixTime);
+
+        strftime(gpsDatestamp, sizeof(gpsDatestamp), "%Y:%m:%d", UTCTimestamp);
+        addExifTag(EXIFTAGID_GPS_DATESTAMP, EXIF_ASCII,
+                          strlen(gpsDatestamp)+1 , 1, (void *)&gpsDatestamp);
+
+        rat_t time_value[3] = { {UTCTimestamp->tm_hour, 1},
+                                {UTCTimestamp->tm_min, 1},
+                                {UTCTimestamp->tm_sec, 1} };
+
+
+        memcpy(&gpsTimestamp, &time_value, sizeof(gpsTimestamp));
+        addExifTag(EXIFTAGID_GPS_TIMESTAMP, EXIF_RATIONAL,
+                    3, 1, (void *)&gpsTimestamp);
+    }
+
+}
+
+
+/* Prime the ZSL (zero-shutter-lag) stream and capture parameter structs
+ * from the current picture and display dimensions, plus the snapshot burst
+ * count chosen in initDefaultParameters.  Always returns true. */
+bool QualcommCameraHardware::initZslParameter(void)
+    { ALOGV("%s: E", __FUNCTION__);
+       mParameters.getPictureSize(&mPictureWidth, &mPictureHeight);
+       ALOGI("initZslParamter E: picture size=%dx%d", mPictureWidth, mPictureHeight);
+       if (updatePictureDimension(mParameters, mPictureWidth, mPictureHeight)) {
+         mDimension.picture_width = mPictureWidth;
+         mDimension.picture_height = mPictureHeight;
+       }
+
+       /* use the default thumbnail sizes */
+       mZslParms.picture_width = mPictureWidth;
+       mZslParms.picture_height = mPictureHeight;
+       mZslParms.preview_width =  mDimension.display_width;
+       mZslParms.preview_height = mDimension.display_height;
+       mZslParms.useExternalBuffers = TRUE;
+       /* fill main image size, thumbnail size, postview size into capture_params_t*/
+       memset(&mZslCaptureParms, 0, sizeof(zsl_capture_params_t));
+       mZslCaptureParms.thumbnail_height = mPostviewHeight;
+       mZslCaptureParms.thumbnail_width = mPostviewWidth;
+       ALOGI("Number of snapshot to capture: %d",numCapture);
+       mZslCaptureParms.num_captures = numCapture;
+
+       return true;
+    }
+
+
+/* Populate mImageEncodeParms for the next JPEG encode pass: main/thumbnail
+ * quality (pushed to the driver), rotation, EXIF tags (date/time, focal
+ * length, ISO), optional downscale output dimensions, plane offsets, and
+ * the `size`-entry output buffer table.
+ * Returns false only when pushing a quality value to the driver fails. */
+bool QualcommCameraHardware::initImageEncodeParameters(int size)
+{
+    ALOGV("%s: E", __FUNCTION__);
+    memset(&mImageEncodeParms, 0, sizeof(encode_params_t));
+    int jpeg_quality = mParameters.getInt("jpeg-quality");
+    bool ret;
+    if (jpeg_quality >= 0) {
+        ALOGI("initJpegParameters, current jpeg main img quality =%d",
+             jpeg_quality);
+        //Application can pass quality of zero
+        //when there is no back sensor connected.
+        //as jpeg quality of zero is not accepted at
+        //camera stack, pass default value.
+        if(jpeg_quality == 0) jpeg_quality = 85;
+        mImageEncodeParms.quality = jpeg_quality;
+        ret = native_set_parms(CAMERA_PARM_JPEG_MAINIMG_QUALITY, sizeof(int), &jpeg_quality);
+        if(!ret){
+          ALOGE("initJpegParametersX: failed to set main image quality");
+          return false;
+        }
+    }
+
+    int thumbnail_quality = mParameters.getInt("jpeg-thumbnail-quality");
+    if (thumbnail_quality >= 0) {
+        //Application can pass quality of zero
+        //when there is no back sensor connected.
+        //as quality of zero is not accepted at
+        //camera stack, pass default value.
+        if(thumbnail_quality == 0) thumbnail_quality = 85;
+        ALOGI("initJpegParameters, current jpeg thumbnail quality =%d",
+             thumbnail_quality);
+        /* TODO: check with mm-camera? */
+        /* NOTE(review): this overwrites the main-image quality stored in
+         * mImageEncodeParms.quality above; the pre-existing TODO suggests
+         * this is unresolved upstream — left as-is to preserve behavior. */
+        mImageEncodeParms.quality = thumbnail_quality;
+        ret = native_set_parms(CAMERA_PARM_JPEG_THUMB_QUALITY, sizeof(int), &thumbnail_quality);
+        if(!ret){
+          ALOGE("initJpegParameters X: failed to set thumbnail quality");
+          return false;
+        }
+    }
+
+    int rotation = mParameters.getInt("rotation");
+    char mDeviceName[PROPERTY_VALUE_MAX];
+    property_get("ro.hw_plat", mDeviceName, "");
+    /* The 7x25A platform's sensor is mounted 90 degrees off; compensate. */
+    if(!strcmp(mDeviceName,"7x25A"))
+        rotation = (rotation + 90)%360;
+
+    if (mIs3DModeOn)
+        rotation = 0;
+    if (rotation >= 0) {
+        ALOGI("initJpegParameters, rotation = %d", rotation);
+        mImageEncodeParms.rotation = rotation;
+    }
+
+    jpeg_set_location();
+
+    //set TimeStamp
+    const char *str = mParameters.get(QCameraParameters::KEY_EXIF_DATETIME);
+    if(str != NULL) {
+      strncpy(dateTime, str, 19);
+      dateTime[19] = '\0';
+      addExifTag(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+                  20, 1, (void *)dateTime);
+    }
+
+    /* Focal length is encoded as a rational with two decimal places. */
+    int focalLengthValue = (int) (mParameters.getFloat(
+                QCameraParameters::KEY_FOCAL_LENGTH) * FOCAL_LENGTH_DECIMAL_PRECISON);
+    rat_t focalLengthRational = {focalLengthValue, FOCAL_LENGTH_DECIMAL_PRECISON};
+    memcpy(&focalLength, &focalLengthRational, sizeof(focalLengthRational));
+    addExifTag(EXIFTAGID_FOCAL_LENGTH, EXIF_RATIONAL, 1,
+                1, (void *)&focalLength);
+    //Adding ExifTag for ISOSpeedRating
+    const char *iso_str = mParameters.get(QCameraParameters::KEY_ISO_MODE);
+    int iso_value = attr_lookup(iso, sizeof(iso) / sizeof(str_map), iso_str);
+    isoMode = iso_arr[iso_value];
+    addExifTag(EXIFTAGID_ISO_SPEED_RATING,EXIF_SHORT,1,1,(void *)&isoMode);
+
+    if (mUseJpegDownScaling) {
+      /* BUGFIX: the original format string had no %s conversion for the
+       * __func__ argument; same rendered text, now well-formed. */
+      ALOGI("%s: update main image", __func__);
+      mImageEncodeParms.output_picture_width = mActualPictWidth;
+      mImageEncodeParms.output_picture_height = mActualPictHeight;
+    }
+    /* NOTE(review): both branches assign the same value; the ADRENO case
+     * looks like a placeholder for a format-specific offset. */
+    mImageEncodeParms.cbcr_offset = mCbCrOffsetRaw;
+    if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO)
+        mImageEncodeParms.cbcr_offset = mCbCrOffsetRaw;
+    /* TODO: check this */
+    mImageEncodeParms.y_offset = 0;
+    /* Hand the encoder the pre-mapped JPEG output buffers. */
+    for(int i = 0; i < size; i++){
+        memset(&mEncodeOutputBuffer[i], 0, sizeof(mm_camera_buffer_t));
+        mEncodeOutputBuffer[i].ptr = (uint8_t *)mJpegMapped[i]->data;
+        mEncodeOutputBuffer[i].filled_size = mJpegMaxSize;
+        mEncodeOutputBuffer[i].size = mJpegMaxSize;
+        mEncodeOutputBuffer[i].fd = mJpegfd[i];
+        mEncodeOutputBuffer[i].offset = 0;
+    }
+    mImageEncodeParms.p_output_buffer = mEncodeOutputBuffer;
+    mImageEncodeParms.exif_data = exif_data;
+    mImageEncodeParms.exif_numEntries = exif_table_numEntries;
+
+    mImageEncodeParms.format3d = mIs3DModeOn;
+    return true;
+}
+
+/* Set a single mm-camera config parameter (fire-and-forget variant).
+ * `length` is only used for logging.  Returns true on MM_CAMERA_SUCCESS. */
+bool QualcommCameraHardware::native_set_parms(
+    camera_parm_type_t type, uint16_t length, void *value)
+{
+    if (mCfgControl.mm_camera_set_parm(type, value) == MM_CAMERA_SUCCESS)
+        return true;
+
+    ALOGE("native_set_parms failed: type %d length %d error %s",
+          type, length, strerror(errno));
+    return false;
+}
+/* Set a single mm-camera config parameter and report the raw driver status
+ * through *result.  MM_CAMERA_ERR_INVALID_OPERATION is deliberately treated
+ * as success so callers can distinguish "unsupported" from a hard failure
+ * via *result. */
+bool QualcommCameraHardware::native_set_parms(
+    camera_parm_type_t type, uint16_t length, void *value, int *result)
+{
+    const mm_camera_status_t status =
+        mCfgControl.mm_camera_set_parm(type, value);
+    ALOGI("native_set_parms status = %d", status);
+    *result = status;
+
+    if (status == MM_CAMERA_SUCCESS ||
+        status == MM_CAMERA_ERR_INVALID_OPERATION)
+        return true;
+
+    ALOGE("%s: type %d length %d error %s, status %d", __FUNCTION__,
+        type, length, strerror(errno), status);
+    return false;
+}
+
+/* Gather the gps-timestamp/-altitude/-latitude/-longitude parameters into a
+ * camera_position_type.  If every field parses, emit the EXIF GPS tags via
+ * setGpsParameters(); a missing or malformed field disables location
+ * encoding entirely (encode_location = false). */
+void QualcommCameraHardware::jpeg_set_location()
+{
+    bool encode_location = true;
+    camera_position_type pt;
+
+/* Parse one "gps-<what>" parameter with sscanf(fmt) into pt.<what>;
+ * on parse failure or absence, clear encode_location. */
+#define PARSE_LOCATION(what,type,fmt,desc) do { \
+        pt.what = 0; \
+        const char *what##_str = mParameters.get("gps-"#what); \
+        ALOGI("GPS PARM %s --> [%s]", "gps-"#what, what##_str); \
+        if (what##_str) { \
+            type what = 0; \
+            if (sscanf(what##_str, fmt, &what) == 1) \
+                pt.what = what; \
+            else { \
+                ALOGE("GPS " #what " %s could not" \
+                     " be parsed as a " #desc, what##_str); \
+                encode_location = false; \
+            } \
+        } \
+        else { \
+            ALOGI("GPS " #what " not specified: " \
+                 "defaulting to zero in EXIF header."); \
+            encode_location = false; \
+        } \
+    } while(0)
+
+    PARSE_LOCATION(timestamp, long, "%ld", "long");
+    /* A missing timestamp falls back to "now" rather than disabling. */
+    if (!pt.timestamp) pt.timestamp = time(NULL);
+    PARSE_LOCATION(altitude, short, "%hd", "short");
+    PARSE_LOCATION(latitude, double, "%lf", "double float");
+    PARSE_LOCATION(longitude, double, "%lf", "double float");
+
+#undef PARSE_LOCATION
+
+    if (encode_location) {
+        ALOGI("setting image location ALT %d LAT %lf LON %lf",
+             pt.altitude, pt.latitude, pt.longitude);
+
+        setGpsParameters();
+        /* Disabling until support is available.
+        if (!LINK_jpeg_encoder_setLocation(&pt)) {
+            ALOGE("jpeg_set_location: LINK_jpeg_encoder_setLocation failed.");
+        }
+        */
+    }
+    else ALOGI("not setting image location");
+}
+
+/* Register (or unregister, per `register_buffer`) one pmem buffer with the
+ * camera driver through CAMERA_OPS_(UN)REGISTER_BUFFER.
+ *   size / frame_size     - buffer length (frame_size is unused here)
+ *   cbcr_offset / yoffset - plane offsets within the buffer
+ *   pmempreviewfd, offset - pmem fd and byte offset of the buffer
+ *   buf                   - mapped virtual address
+ *   pmem_type             - MSM_PMEM_* usage type
+ *   vfe_can_write         - marks the buffer "active" (VFE may fill it)
+ *   use_all_chnls         - true: take plane 1/2 offsets from myv12_params
+ *                           (YV12 layout); false: use cbcr_offset/yoffset
+ * Returns false when the driver rejects the (un)registration. */
+static bool register_buf(int size,
+                         int frame_size,
+                         int cbcr_offset,
+                         int yoffset,
+                         int pmempreviewfd,
+                         uint32_t offset,
+                         uint8_t *buf,
+                         int pmem_type,
+                         bool vfe_can_write,
+                         bool register_buffer,
+                         bool use_all_chnls)
+{
+    struct msm_pmem_info pmemBuf;
+    CAMERA_HAL_UNUSED(frame_size);
+
+    memset(&pmemBuf, 0, sizeof(struct msm_pmem_info));
+    pmemBuf.type     = pmem_type;
+    pmemBuf.fd       = pmempreviewfd;
+    pmemBuf.offset   = offset;
+    pmemBuf.len      = size;
+    pmemBuf.vaddr    = buf;
+    pmemBuf.planar0_off = yoffset;
+    if(!use_all_chnls) {
+        ALOGI("use_all_chnls = %d\n", use_all_chnls);
+        pmemBuf.planar1_off = cbcr_offset;
+        pmemBuf.planar2_off = yoffset;
+    } else {
+        pmemBuf.planar1_off = myv12_params.CbOffset;
+        pmemBuf.planar2_off = myv12_params.CrOffset;
+    }
+    ALOGI("register_buf: CbOff = 0x%x CrOff = 0x%x",
+        pmemBuf.planar1_off, pmemBuf.planar2_off);
+
+    pmemBuf.active   = vfe_can_write;
+
+    /* NOTE(review): prints the *negation* of register_buffer under the
+     * label "reg" — confusing but kept to preserve log output. */
+    ALOGI("register_buf: reg = %d buffer = %p",
+        !register_buffer, buf);
+    /* BUGFIX: native_start_ops() returns bool, so the original `< 0`
+     * comparison could never be true and (un)registration failures were
+     * silently ignored; test the boolean result directly. */
+    if(!native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+        CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf)) {
+        ALOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM error %s",
+            strerror(errno));
+        return false;
+    }
+
+    return true;
+
+}
+
+/* Redeclaration of register_buf supplying the default arguments
+ * (register_buffer = true, use_all_chnls = false) for callers that omit
+ * the trailing parameters. */
+static bool register_buf(int size,
+                         int frame_size,
+                         int cbcr_offset,
+                         int yoffset,
+                         int pmempreviewfd,
+                         uint32_t offset,
+                         uint8_t *buf,
+                         int pmem_type,
+                         bool vfe_can_write,
+                         bool register_buffer = true,
+                         bool use_all_chnls = false);
+
+void QualcommCameraHardware::runFrameThread(void *data)
+{
+ ALOGV("runFrameThread E");
+ int type;
+ int CbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+
+ if(libmmcamera)
+ {
+ LINK_cam_frame(data);
+ }
+ //waiting for preview thread to complete before clearing of the buffers
+ mPreviewThreadWaitLock.lock();
+ while (mPreviewThreadRunning) {
+ ALOGI("runframethread: waiting for preview thread to complete.");
+ mPreviewThreadWait.wait(mPreviewThreadWaitLock);
+ ALOGI("initPreview: old preview thread completed.");
+ }
+ mPreviewThreadWaitLock.unlock();
+
+ // Cancelling previewBuffers and returning them to display before stopping preview
+ // This will ensure that all preview buffers are available for dequeing when
+ //startPreview is called again with the same ANativeWindow object (snapshot case). If the
+ //ANativeWindow is a new one(camera-camcorder switch case) because the app passed a new
+ //surface then buffers will be re-allocated and not returned from the old pool.
+ relinquishBuffers();
+ mPreviewBusyQueue.flush();
+ /* Flush the Free Q */
+ LINK_camframe_release_all_frames(CAM_PREVIEW_FRAME);
+
+ if(mIs3DModeOn != true) {
+#if 0
+ if(mInHFRThread == false)
+ {
+ mPmemWaitLock.lock();
+ //mPreviewHeap.clear();
+// TODO do properly
+ mPrevHeapDeallocRunning = true;
+ mPmemWait.signal();
+ mPmemWaitLock.unlock();
+
+ if(( mPreviewFormat == CAMERA_YUV_420_YV12 )&&
+ ( mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627) &&
+ previewWidth%32 != 0 )
+ mYV12Heap.clear();
+
+ }
+ else
+#endif
+ {
+ int mBufferSize = previewWidth * previewHeight * 3/2;
+ int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+ ALOGI("unregistering all preview buffers");
+ //unregister preview buffers. we are not deallocating here.
+ for (int cnt = 0; cnt < mTotalPreviewBufferCount; ++cnt) {
+ register_buf(mBufferSize,
+ mBufferSize,
+ mCbCrOffset,
+ 0,
+ frames[cnt].fd,
+ 0,
+ (uint8_t *)frames[cnt].buffer,
+ MSM_PMEM_PREVIEW,
+ false,
+ false,
+ true);
+ //mPreviewHeap[cnt].clear();
+ // TODO : clean properly
+ }
+ }
+ }
+ if(!mZslEnable) {
+ if(( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)){
+ if(mHFRMode != true) {
+#if 0
+ mRecordHeap.clear();
+ mRecordHeap = NULL;
+#endif
+ } else {
+ ALOGI("%s: unregister record buffers with camera driver", __FUNCTION__);
+ register_record_buffers(false);
+ }
+ int CbCrOffset = PAD_TO_2K(mDimension.video_width * mDimension.video_height);
+ for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+#if 0
+ if (mRecordfd[cnt] > 0) {
+ ALOGE("Unregistering buffer %d with kernel",cnt);
+ register_buf(mRecordFrameSize,
+ mRecordFrameSize, CbCrOffset, 0,
+ mRecordfd[cnt],
+ 0,
+ (uint8_t *)recordframes[cnt].buffer,
+ MSM_PMEM_VIDEO,
+ false, false);
+ ALOGE("Came back from register call to kernel");
+ }
+#endif
+ type = MSM_PMEM_VIDEO;
+ ALOGI("%s: unregister record buffers[%d] with camera driver", __FUNCTION__, cnt);
+ if(recordframes) {
+ register_buf(mRecordFrameSize,
+ mRecordFrameSize, CbCrOffset, 0,
+ recordframes[cnt].fd,
+ 0,
+ (uint8_t *)recordframes[cnt].buffer,
+ type,
+ false,false);
+ if(mRecordMapped[cnt]) {
+ mRecordMapped[cnt]->release(mRecordMapped[cnt]);
+ mRecordMapped[cnt] = NULL;
+ close(mRecordfd[cnt]);
+ if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+ struct encoder_media_buffer_type * packet =
+ (struct encoder_media_buffer_type *)metadata_memory[cnt]->data;
+ native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+ metadata_memory[cnt]->release(metadata_memory[cnt]);
+ metadata_memory[cnt] = NULL;
+ }
+#ifdef USE_ION
+ deallocate_ion_memory(&record_main_ion_fd[cnt], &record_ion_info_fd[cnt]);
+#endif
+ }
+ }
+ }
+ }
+ }
+
+ mFrameThreadWaitLock.lock();
+ mFrameThreadRunning = false;
+ mFrameThreadWait.signal();
+ mFrameThreadWaitLock.unlock();
+
+ ALOGV("runFrameThread X");
+}
+
+
+void QualcommCameraHardware::runPreviewThread(void *data)
+{
+ static int hfr_count = 0;
+ msm_frame* frame = NULL;
+ status_t retVal = NO_ERROR;
+ CAMERA_HAL_UNUSED(data);
+ android_native_buffer_t *buffer;
+ buffer_handle_t *handle = NULL;
+ int bufferIndex = 0;
+
+ while((frame = mPreviewBusyQueue.get()) != NULL) {
+ if (UNLIKELY(mDebugFps)) {
+ debugShowPreviewFPS();
+ }
+ mCallbackLock.lock();
+ int msgEnabled = mMsgEnabled;
+ camera_data_callback pcb = mDataCallback;
+ void *pdata = mCallbackCookie;
+ camera_data_timestamp_callback rcb = mDataCallbackTimestamp;
+ void *rdata = mCallbackCookie;
+ camera_data_callback mcb = mDataCallback;
+ void *mdata = mCallbackCookie;
+ mCallbackLock.unlock();
+
+ // signal smooth zoom thread , that a new preview frame is available
+ mSmoothzoomThreadWaitLock.lock();
+ if(mSmoothzoomThreadRunning) {
+ mSmoothzoomThreadWait.signal();
+ }
+ mSmoothzoomThreadWaitLock.unlock();
+
+ // Find the offset within the heap of the current buffer.
+ ssize_t offset_addr = 0; // TODO , use proper value
+ // (ssize_t)frame->buffer - (ssize_t)mPreviewHeap->mHeap->base();
+ // ssize_t offset = offset_addr / mPreviewHeap->mAlignedBufferSize;
+ common_crop_t *crop = (common_crop_t *) (frame->cropinfo);
+#ifdef DUMP_PREVIEW_FRAMES
+ static int frameCnt = 0;
+ int written;
+ if (frameCnt >= 0 && frameCnt <= 10 ) {
+ char buf[128];
+ snprintf(buf, sizeof(buf), "/data/%d_preview.yuv", frameCnt);
+ int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ ALOGI("dumping preview frame %d", frameCnt);
+ if (file_fd < 0) {
+ ALOGE("cannot open file\n");
+ }
+ else
+ {
+ ALOGI("dumping data");
+ written = write(file_fd, (uint8_t *)frame->buffer,
+ mPreviewFrameSize );
+ if(written < 0)
+ ALOGE("error in data write");
+ }
+ close(file_fd);
+ }
+ frameCnt++;
+#endif
+ mInPreviewCallback = true;
+ if (crop->in1_w != 0 && crop->in1_h != 0) {
+ zoomCropInfo.left = (crop->out1_w - crop->in1_w + 1) / 2 - 1;
+ zoomCropInfo.top = (crop->out1_h - crop->in1_h + 1) / 2 - 1;
+ /* There can be scenarios where the in1_wXin1_h and
+ * out1_wXout1_h are same. In those cases, reset the
+ * x and y to zero instead of negative for proper zooming
+ */
+ if(zoomCropInfo.left < 0) zoomCropInfo.left = 0;
+ if(zoomCropInfo.top < 0) zoomCropInfo.top = 0;
+ zoomCropInfo.right = zoomCropInfo.left + crop->in1_w;
+ zoomCropInfo.bottom = zoomCropInfo.top + crop->in1_h;
+ mPreviewWindow-> set_crop (mPreviewWindow,
+ zoomCropInfo.left,
+ zoomCropInfo.top,
+ zoomCropInfo.right,
+ zoomCropInfo.bottom);
+ /* Set mResetOverlayCrop to true, so that when there is
+ * no crop information, setCrop will be called
+ * with zero crop values.
+ */
+ mResetWindowCrop = true;
+
+ } else {
+ // Reset zoomCropInfo variables. This will ensure that
+ // stale values wont be used for postview
+ zoomCropInfo.left = 0;
+ zoomCropInfo.top = 0;
+ zoomCropInfo.right = crop->in1_w;
+ zoomCropInfo.bottom = crop->in1_h;
+ /* This reset is required, if not, overlay driver continues
+ * to use the old crop information for these preview
+ * frames which is not the correct behavior. To avoid
+ * multiple calls, reset once.
+ */
+ if(mResetWindowCrop == true){
+ mPreviewWindow-> set_crop (mPreviewWindow,
+ zoomCropInfo.left,
+ zoomCropInfo.top,
+ zoomCropInfo.right,
+ zoomCropInfo.bottom);
+ mResetWindowCrop = false;
+ }
+ }
+ /* To overcome a timing case where we could be having the overlay refer to deallocated
+ mDisplayHeap(and showing corruption), the mDisplayHeap is not deallocated until the
+ first preview frame is queued to the overlay in 8660. Also adding the condition
+ to check if snapshot is currently in progress ensures that the resources being
+ used by the snapshot thread are not incorrectly deallocated by preview thread*/
+ if ((mCurrentTarget == TARGET_MSM8660)&&(mFirstFrame == true)) {
+ ALOGD(" receivePreviewFrame : first frame queued, display heap being deallocated");
+ mThumbnailHeap.clear();
+ mDisplayHeap.clear();
+ if(!mZslEnable){
+ mDisplayHeap.clear();
+ mPostviewHeap.clear();
+ }
+ mFirstFrame = false;
+ }
+ mLastQueuedFrame = (void *)frame->buffer;
+ bufferIndex = mapBuffer(frame);
+
+ // if 7x27A && yv12 is set as preview format use convert routines to
+ // convert from YUV420sp to YV12
+ yuv_image_type in_buf, out_buf;
+ int conversion_result = 0;
+
+ if(( mPreviewFormat == CAMERA_YUV_420_YV12 ) &&
+ ( mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627 )){
+ // if the width is not multiple of 32,
+ //we cannot do inplace conversion as sizes of 420sp and YV12 frames differ
+ if(previewWidth%32){
+#if 0 //TODO :
+ ALOGE("YV12::Doing not inplace conversion from 420sp to yv12");
+ in_buf.imgPtr = (unsigned char*)mPreviewMapped[bufferIndex]->data;
+ in_buf.dx = out_buf.dx = previewWidth;
+ in_buf.dy = out_buf.dy = previewHeight;
+ conversion_result = LINK_yuv_convert_ycrcb420sp_to_yv12(&in_buf, &out_buf);
+#endif
+ } else {
+ ALOGI("Doing inplace conversion from 420sp to yv12");
+ in_buf.imgPtr = (unsigned char *)mPreviewMapped[bufferIndex]->data;
+ in_buf.dx = previewWidth;
+ in_buf.dy = previewHeight;
+ conversion_result = LINK_yuv_convert_ycrcb420sp_to_yv12_inplace(&in_buf);
+ }
+ }
+
+ if(bufferIndex >= 0) {
+ //Need to encapsulate this in IMemory object and send
+
+ if (pcb != NULL && (msgEnabled & CAMERA_MSG_PREVIEW_FRAME)) {
+ int previewBufSize;
+ /* for CTS : Forcing preview memory buffer length to be
+ 'previewWidth * previewHeight * 3/2'. Needed when gralloc allocated extra memory.*/
+ if( mPreviewFormat == CAMERA_YUV_420_NV21 || mPreviewFormat == CAMERA_YUV_420_YV12) {
+ previewBufSize = previewWidth * previewHeight * 3/2;
+ camera_memory_t *previewMem = mGetMemory(frames[bufferIndex].fd, previewBufSize,
+ 1, mCallbackCookie);
+ if (!previewMem || !previewMem->data) {
+ ALOGE("%s: mGetMemory failed.\n", __func__);
+ } else {
+ pcb(CAMERA_MSG_PREVIEW_FRAME,previewMem,0,NULL,pdata);
+ previewMem->release(previewMem);
+ }
+ } else
+ pcb(CAMERA_MSG_PREVIEW_FRAME,(camera_memory_t *) mPreviewMapped[bufferIndex],0,NULL,pdata);
+ }
+
+ // TODO : may have to return proper frame as pcb
+ mDisplayLock.lock();
+ if( mPreviewWindow != NULL) {
+ if (BUFFER_LOCKED == frame_buffer[bufferIndex].lockState) {
+ if (GENLOCK_FAILURE == genlock_unlock_buffer(
+ (native_handle_t*)(*(frame_buffer[bufferIndex].buffer)))) {
+ ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ mDisplayLock.unlock();
+ } else {
+ frame_buffer[bufferIndex].lockState = BUFFER_UNLOCKED;
+ }
+ } else {
+ ALOGI("%s: buffer to be enqueued is unlocked", __FUNCTION__);
+ mDisplayLock.unlock();
+ }
+ const char *str = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+ if(str != NULL){
+ int is_hfr_off = 0;
+ hfr_count++;
+ if(!strcmp(str, QCameraParameters::VIDEO_HFR_OFF)) {
+ is_hfr_off = 1;
+ retVal = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+ frame_buffer[bufferIndex].buffer);
+ } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_2X)) {
+ hfr_count %= 2;
+ } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_3X)) {
+ hfr_count %= 3;
+ } else if (!strcmp(str, QCameraParameters::VIDEO_HFR_4X)) {
+ hfr_count %= 4;
+ }
+ if(hfr_count == 0)
+ retVal = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+ frame_buffer[bufferIndex].buffer);
+ else if(!is_hfr_off)
+ retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+ frame_buffer[bufferIndex].buffer);
+ } else
+ retVal = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+ frame_buffer[bufferIndex].buffer);
+ if( retVal != NO_ERROR)
+ ALOGE("%s: Failed while queueing buffer %d for display."
+ " Error = %d", __FUNCTION__,
+ frames[bufferIndex].fd, retVal);
+ int stride;
+ retVal = mPreviewWindow->dequeue_buffer(mPreviewWindow,
+ &(handle),&(stride));
+ private_handle_t *bhandle = (private_handle_t *)(*handle);
+ if( retVal != NO_ERROR) {
+ ALOGE("%s: Failed while dequeueing buffer from display."
+ " Error = %d", __FUNCTION__, retVal);
+ } else {
+ retVal = mPreviewWindow->lock_buffer(mPreviewWindow,handle);
+ //yyan todo use handle to find out buffer
+ if(retVal != NO_ERROR)
+ ALOGE("%s: Failed while dequeueing buffer from"
+ "display. Error = %d", __FUNCTION__, retVal);
+ }
+ }
+ mDisplayLock.unlock();
+ } else
+ ALOGE("Could not find the buffer");
+
+ // If output is NOT enabled (targets otherthan 7x30 , 8x50 and 8x60 currently..)
+
+ nsecs_t timeStamp = nsecs_t(frame->ts.tv_sec)*1000000000LL + frame->ts.tv_nsec;
+
+ if( (mCurrentTarget != TARGET_MSM7630 ) && (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660)) {
+ int flagwait = 1;
+ if(rcb != NULL && (msgEnabled & CAMERA_MSG_VIDEO_FRAME) && (record_flag)) {
+ if(mStoreMetaDataInFrame){
+ flagwait = 1;
+ if(metadata_memory[bufferIndex]!= NULL)
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, metadata_memory[bufferIndex],0,rdata);
+ else flagwait = 0;
+ } else {
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, mPreviewMapped[bufferIndex],0, rdata);
+ }
+ if(flagwait){
+ Mutex::Autolock rLock(&mRecordFrameLock);
+ if (mReleasedRecordingFrame != true) {
+ mRecordWait.wait(mRecordFrameLock);
+ }
+ mReleasedRecordingFrame = false;
+ }
+ }
+ }
+
+ if ( mCurrentTarget == TARGET_MSM8660 ) {
+ mMetaDataWaitLock.lock();
+ if (mFaceDetectOn == true && mSendMetaData == true) {
+ mSendMetaData = false;
+ fd_roi_t *roi = (fd_roi_t *)(frame->roi_info.info);
+
+ switch (roi->type) {
+ case FD_ROI_TYPE_HEADER:
+ {
+ mNumFDRcvd = 0;
+ memset(mFaceArray, -1, sizeof(mFaceArray));
+ mFaceArray[0] = 0; //faces_detected * 4;
+
+ mFacesDetected = roi->d.hdr.num_face_detected;
+ if(mFacesDetected > MAX_ROI)
+ mFacesDetected = MAX_ROI;
+ }
+ break;
+ case FD_ROI_TYPE_DATA:
+ {
+ int idx = roi->d.data.idx;
+ if (idx < mFacesDetected) {
+ mFaceArray[idx*4+1] = roi->d.data.face.face_boundary.x;
+ mFaceArray[idx*4+2] = roi->d.data.face.face_boundary.y;
+ mFaceArray[idx*4+3] = roi->d.data.face.face_boundary.dx;
+ mFaceArray[idx*4+4] = roi->d.data.face.face_boundary.dy;
+ mNumFDRcvd++;
+ if (mNumFDRcvd == mFacesDetected) {
+ mFaceArray[0] = mFacesDetected * 4;
+ if(mMetaDataHeap != NULL){
+ ALOGV("runPreviewThread mMetaDataHEap is non-NULL");
+ memcpy((uint32_t *)mMetaDataHeap->mHeap->base(), (uint32_t *)mFaceArray, sizeof(mFaceArray));
+ }
+ }
+ }
+ }
+ break;
+ }
+ }
+ mMetaDataWaitLock.unlock();
+ }
+ bufferIndex = mapFrame(handle);
+ if(bufferIndex >= 0) {
+ LINK_camframe_add_frame(CAM_PREVIEW_FRAME, &frames[bufferIndex]);
+ private_handle_t *bhandle = (private_handle_t *)(*handle);
+ if (GENLOCK_NO_ERROR != genlock_lock_buffer(bhandle, GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+ ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+ frame_buffer[bufferIndex].lockState = BUFFER_UNLOCKED;
+ } else {
+ frame_buffer[bufferIndex].lockState = BUFFER_LOCKED;
+ }
+ } else {
+ ALOGE("Could not find the Frame");
+
+ // Special Case: Stoppreview is issued which causes thumbnail buffer
+ // to be cancelled. Frame thread has still not exited. In preview thread
+ // dequeue returns incorrect buffer id (previously cancelled thumbnail buffer)
+ // This will throw error "Could not find frame". We need to cancel the incorrectly
+ // dequeued buffer here to ensure that all buffers are available for the next
+ // startPreview call.
+
+ mDisplayLock.lock();
+ ALOGV(" error Cancelling preview buffers ");
+ retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+ handle);
+ if(retVal != NO_ERROR)
+ ALOGE("%s: cancelBuffer failed for buffer", __FUNCTION__);
+ mDisplayLock.unlock();
+ }
+ }
+ mPreviewThreadWaitLock.lock();
+ mPreviewThreadRunning = false;
+ mPreviewThreadWait.signal();
+ mPreviewThreadWaitLock.unlock();
+}
+int QualcommCameraHardware::mapBuffer(struct msm_frame *frame) {
+ int ret = -1;
+ for (int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+ if (frame_buffer[cnt].frame->buffer == frame->buffer) {
+ ret = cnt;
+ break;
+ }
+ }
+ return ret;
+}
+int QualcommCameraHardware::mapvideoBuffer(struct msm_frame *frame)
+{
+ int ret = -1;
+ for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+ if ((unsigned int)mRecordMapped[cnt]->data == (unsigned int)frame->buffer) {
+ ret = cnt;
+ ALOGI("found match returning %d", ret);
+ break;
+ }
+ }
+ return ret;
+
+}
+int QualcommCameraHardware::mapRawBuffer(struct msm_frame *frame)
+{
+ int ret = -1;
+ for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+ if ((unsigned int)mRawMapped[cnt]->data == (unsigned int)frame->buffer) {
+ ret = cnt;
+ ALOGI("found match returning %d", ret);
+ break;
+ }
+ }
+ return ret;
+}
+int QualcommCameraHardware::mapThumbnailBuffer(struct msm_frame *frame)
+{
+ int ret = -1;
+ for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+ if ((unsigned int)(uint8_t *)mThumbnailMapped[cnt] == (unsigned int)frame->buffer) {
+ ret = cnt;
+ ALOGI("found match returning %d", ret);
+ break;
+ }
+ }
+ if(ret < 0) ALOGE("mapThumbnailBuffer, could not find match");
+ return ret;
+}
+int QualcommCameraHardware::mapJpegBuffer(mm_camera_buffer_t *encode_buffer)
+{
+ int ret = -1;
+ for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+ if ((unsigned int)mJpegMapped[cnt]->data == (unsigned int)encode_buffer->ptr) {
+ ret = cnt;
+ ALOGI("found match returning %d", ret);
+ break;
+ }
+ }
+ return ret;
+}
+int QualcommCameraHardware::mapFrame(buffer_handle_t *buffer) {
+ int ret = -1;
+ for (int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+ if (frame_buffer[cnt].buffer == buffer) {
+ ret = cnt;
+ break;
+ }
+ }
+ return ret;
+}
+
+void *preview_thread(void *user)
+{
+ ALOGV("preview_thread E");
+ QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->runPreviewThread(user);
+ }
+ else ALOGE("not starting preview thread: the object went away!");
+ ALOGV("preview_thread X");
+ return NULL;
+}
+
+void *hfr_thread(void *user)
+{
+ ALOGV("hfr_thread E");
+ QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->runHFRThread(user);
+ }
+ else ALOGE("not starting hfr thread: the object went away!");
+ ALOGV("hfr_thread X");
+ return NULL;
+}
+
+void QualcommCameraHardware::runHFRThread(void *data)
+{
+ ALOGV("runHFRThread E");
+ mInHFRThread = true;
+ CAMERA_HAL_UNUSED(data);
+ ALOGI("%s: stopping Preview", __FUNCTION__);
+ stopPreviewInternal();
+
+ // Release thumbnail Buffers
+ if( mPreviewWindow != NULL ) {
+ private_handle_t *handle;
+ for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+ if(mPreviewWindow != NULL && mThumbnailBuffer[cnt] != NULL) {
+ handle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+ ALOGV("%s: Cancelling postview buffer %d ", __FUNCTION__, handle->fd);
+ ALOGV("runHfrThread : display lock");
+ mDisplayLock.lock();
+ if (BUFFER_LOCKED == mThumbnailLockState[cnt]) {
+ if (GENLOCK_FAILURE == genlock_unlock_buffer(handle)) {
+ ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ mDisplayLock.unlock();
+ continue;
+ } else {
+ mThumbnailLockState[cnt] = BUFFER_UNLOCKED;
+ }
+ }
+ status_t retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+ mThumbnailBuffer[cnt]);
+ if(retVal != NO_ERROR)
+ ALOGE("%s: cancelBuffer failed for postview buffer %d",
+ __FUNCTION__, handle->fd);
+ // unregister , unmap and release as well
+ int mBufferSize = previewWidth * previewHeight * 3/2;
+ int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+ if(mThumbnailMapped[cnt] && (mSnapshotFormat == PICTURE_FORMAT_JPEG)) {
+ ALOGI("%s: Unregistering Thumbnail Buffer %d ", __FUNCTION__, handle->fd);
+ register_buf(mBufferSize,
+ mBufferSize, mCbCrOffset, 0,
+ handle->fd,
+ 0,
+ (uint8_t *)mThumbnailMapped[cnt],
+ MSM_PMEM_THUMBNAIL,
+ false, false);
+ if (munmap((void *)(mThumbnailMapped[cnt]),handle->size ) == -1) {
+ ALOGE("StopPreview : Error un-mmapping the thumbnail buffer %d", cnt);
+ }
+ mThumbnailBuffer[cnt] = NULL;
+ mThumbnailMapped[cnt] = NULL;
+ }
+ ALOGV("runHfrThread : display unlock");
+ mDisplayLock.unlock();
+ }
+ }
+ }
+
+ ALOGV("%s: setting parameters", __FUNCTION__);
+ setParameters(mParameters);
+ ALOGV("%s: starting Preview", __FUNCTION__);
+ if( mPreviewWindow == NULL)
+ {
+ startPreviewInternal();
+ }
+ else {
+ getBuffersAndStartPreview();
+ }
+
+ mHFRMode = false;
+ mInHFRThread = false;
+}
+
+void QualcommCameraHardware::runVideoThread(void *data)
+{
+ ALOGV("runVideoThread E");
+ msm_frame* vframe = NULL;
+ CAMERA_HAL_UNUSED(data);
+
+ while(true) {
+ pthread_mutex_lock(&(g_busy_frame_queue.mut));
+
+ // Exit the thread , in case of stop recording..
+ mVideoThreadWaitLock.lock();
+ if(mVideoThreadExit){
+ ALOGV("Exiting video thread..");
+ mVideoThreadWaitLock.unlock();
+ pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+ break;
+ }
+ mVideoThreadWaitLock.unlock();
+
+ ALOGV("in video_thread : wait for video frame ");
+ // check if any frames are available in busyQ and give callback to
+ // services/video encoder
+ cam_frame_wait_video();
+ ALOGV("video_thread, wait over..");
+
+ // Exit the thread , in case of stop recording..
+ mVideoThreadWaitLock.lock();
+ if(mVideoThreadExit){
+ ALOGV("Exiting video thread..");
+ mVideoThreadWaitLock.unlock();
+ pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+ break;
+ }
+ mVideoThreadWaitLock.unlock();
+
+ // Get the video frame to be encoded
+ vframe = cam_frame_get_video ();
+ pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+ ALOGI("in video_thread : got video frame %p", vframe);
+
+ /*if (UNLIKELY(mDebugFps)) {
+ debugShowVideoFPS();
+ }*/
+
+ if(vframe != NULL) {
+ // Find the offset within the heap of the current buffer.
+ //ALOGV("Got video frame : buffer %d base %d ", vframe->buffer,
+ //(unsigned long int)mRecordHeap->mHeap->base());
+ //ssize_t offset =
+ // (ssize_t)vframe->buffer - (ssize_t)mRecordHeap->mHeap->base();
+ //ALOGV("offset = %d , alignsize = %d , new_offset = %d", (int)offset, mRecordHeap->mAlignedBufferSize,
+ // (int)(offset / mRecordHeap->mAlignedBufferSize));
+
+ //offset /= mRecordHeap->mAlignedBufferSize;
+
+ //set the track flag to true for this video buffer
+ //record_buffers_tracking_flag[offset] = true;
+
+ /* Extract the timestamp of this frame */
+ nsecs_t timeStamp = nsecs_t(vframe->ts.tv_sec)*1000000000LL + vframe->ts.tv_nsec;
+
+ // dump frames for test purpose
+#if 0
+ static int frameCnt = 0;
+ if (frameCnt >= 11 && frameCnt <= 13 ) {
+ char buf[128];
+ sprintf(buf, "/data/%d_v.yuv", frameCnt);
+ int file_fd = open(buf, O_RDWR | O_CREAT, 0777);
+ ALOGV("dumping video frame %d", frameCnt);
+ if (file_fd < 0) {
+ ALOGE("cannot open file\n");
+ }
+ else
+ {
+ write(file_fd, (const void *)vframe->buffer,
+ vframe->cbcr_off * 3 / 2);
+ }
+ close(file_fd);
+ }
+ frameCnt++;
+#endif
+#if 0
+ if(mIs3DModeOn ) {
+ /* VPE will be taking care of zoom, so no need to
+ * use overlay's setCrop interface for zoom
+ * functionality.
+ */
+ /* get the offset of current video buffer for rendering */
+ ssize_t offset_addr = (ssize_t)vframe->buffer -
+ (ssize_t)mRecordHeap->mHeap->base();
+ /* To overcome a timing case where we could be having the overlay refer to deallocated
+ mDisplayHeap(and showing corruption), the mDisplayHeap is not deallocated until the
+ first preview frame is queued to the overlay in 8660 */
+ if ((mCurrentTarget == TARGET_MSM8660)&&(mFirstFrame == true)) {
+ ALOGD(" receivePreviewFrame : first frame queued, display heap being deallocated");
+ mThumbnailHeap.clear();
+ mDisplayHeap.clear();
+ mFirstFrame = false;
+ mPostviewHeap.clear();
+ }
+ mLastQueuedFrame = (void *)vframe->buffer;
+ }
+#endif
+ // Enable IF block to give frames to encoder , ELSE block for just simulation
+#if 1
+ ALOGV("in video_thread : got video frame, before if check giving frame to services/encoder");
+ mCallbackLock.lock();
+ int msgEnabled = mMsgEnabled;
+ camera_data_timestamp_callback rcb = mDataCallbackTimestamp;
+ void *rdata = mCallbackCookie;
+ mCallbackLock.unlock();
+
+ /* When 3D mode is ON, the video thread will be ON even in preview
+ * mode. We need to distinguish when recording is started. So, when
+ * 3D mode is ON, check for the recordingState (which will be set
+ * with start recording and reset in stop recording), before
+ * calling rcb.
+ */
+ int index = mapvideoBuffer(vframe);
+ if(!mIs3DModeOn) {
+ record_buffers_tracking_flag[index] = true;
+ if(rcb != NULL && (msgEnabled & CAMERA_MSG_VIDEO_FRAME) ) {
+ ALOGV("in video_thread : got video frame, giving frame to services/encoder index = %d", index);
+ if(mStoreMetaDataInFrame){
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, metadata_memory[index],0,rdata);
+ } else {
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, mRecordMapped[index],0,rdata);
+ }
+ }
+ }
+#if 0
+ else {
+ mCallbackLock.lock();
+ msgEnabled = mMsgEnabled;
+ data_callback pcb = mDataCallback;
+ void *pdata = mCallbackCookie;
+ mCallbackLock.unlock();
+ if (pcb != NULL) {
+ ALOGE("pcb is not null");
+ static int count = 0;
+ //if(msgEnabled & CAMERA_MSG_PREVIEW_FRAME) {
+ if (!count) {
+ ALOGE("Giving first frame to app");
+ pcb(CAMERA_MSG_PREVIEW_FRAME, mRecordHeap->mBuffers[offset],
+ pdata);
+ count++;
+ }
+ }
+ if(mRecordingState == 1) {
+ if(rcb != NULL && (msgEnabled & CAMERA_MSG_VIDEO_FRAME) ) {
+ ALOGV("in video_thread 3D mode : got video frame, giving frame to services/encoder");
+ rcb(timeStamp, CAMERA_MSG_VIDEO_FRAME, mRecordHeap->mBuffers[offset], rdata);
+ }
+ } else {
+ /* When in preview mode, put the video buffer back into
+ * free Q, for next availability.
+ */
+ ALOGV("in video_thread 3D mode : got video frame, putting frame to Free Q");
+ record_buffers_tracking_flag[offset] = false;
+ LINK_camframe_add_frame(CAM_VIDEO_FRAME,vframe);
+ }
+ }
+#endif
+#else
+ // 720p output2 : simulate release frame here:
+ ALOGI("in video_thread simulation , releasing the video frame");
+ LINK_camframe_add_frame(CAM_VIDEO_FRAME,vframe);
+#endif
+
+ } else ALOGE("in video_thread get frame returned null");
+
+
+ } // end of while loop
+
+ mVideoThreadWaitLock.lock();
+ mVideoThreadRunning = false;
+ mVideoThreadWait.signal();
+ mVideoThreadWaitLock.unlock();
+
+ ALOGV("runVideoThread X");
+}
+
+void *video_thread(void *user)
+{
+ ALOGV("video_thread E");
+ CAMERA_HAL_UNUSED(user);
+
+ QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->runVideoThread(user);
+ }
+ else ALOGE("not starting video thread: the object went away!");
+ ALOGV("video_thread X");
+ return NULL;
+}
+
+void *frame_thread(void *user)
+{
+ ALOGD("frame_thread E");
+ CAMERA_HAL_UNUSED(user);
+ QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->runFrameThread(user);
+ }
+ else ALOGW("not starting frame thread: the object went away!");
+ ALOGD("frame_thread X");
+ return NULL;
+}
+
+static int parse_size(const char *str, int &width, int &height)
+{
+ // Find the width.
+ char *end;
+ int w = (int)strtol(str, &end, 10);
+ // If an 'x' or 'X' does not immediately follow, give up.
+ if ( (*end != 'x') && (*end != 'X') )
+ return -1;
+
+ // Find the height, immediately after the 'x'.
+ int h = (int)strtol(end+1, 0, 10);
+
+ width = w;
+ height = h;
+
+ return 0;
+}
+QualcommCameraHardware* hardware;
+
+int QualcommCameraHardware::allocate_ion_memory(int *main_ion_fd, struct ion_allocation_data* alloc,
+ struct ion_fd_data* ion_info_fd, int ion_type, int size, int *memfd)
+{
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ *main_ion_fd = open("/dev/ion", O_RDONLY | O_SYNC);
+ if (*main_ion_fd < 0) {
+ ALOGE("Ion dev open failed\n");
+ ALOGE("Error is %s\n", strerror(errno));
+ goto ION_OPEN_FAILED;
+ }
+ alloc->len = size;
+ /* to make it page size aligned */
+ alloc->len = (alloc->len + 4095) & (~4095);
+ alloc->align = 4096;
+ alloc->flags = 0;
+ alloc->heap_mask = (0x1 << ion_type | 0x1 << ION_IOMMU_HEAP_ID);
+
+ rc = ioctl(*main_ion_fd, ION_IOC_ALLOC, alloc);
+ if (rc < 0) {
+ ALOGE("ION allocation failed\n");
+ goto ION_ALLOC_FAILED;
+ }
+
+ ion_info_fd->handle = alloc->handle;
+ rc = ioctl(*main_ion_fd, ION_IOC_SHARE, ion_info_fd);
+ if (rc < 0) {
+ ALOGE("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ *memfd = ion_info_fd->fd;
+ return 0;
+
+ION_MAP_FAILED:
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(*main_ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ close(*main_ion_fd);
+ION_OPEN_FAILED:
+ return -1;
+}
+int QualcommCameraHardware::deallocate_ion_memory(int *main_ion_fd, struct ion_fd_data* ion_info_fd)
+{
+ struct ion_handle_data handle_data;
+ int rc = 0;
+
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(*main_ion_fd, ION_IOC_FREE, &handle_data);
+ close(*main_ion_fd);
+ return rc;
+}
+
+bool QualcommCameraHardware::initPreview()
+{
+ const char * pmem_region;
+ int CbCrOffset = 0;
+ int ion_heap;
+ mParameters.getPreviewSize(&previewWidth, &previewHeight);
+ const char *recordSize = NULL;
+ recordSize = mParameters.get(QCameraParameters::KEY_VIDEO_SIZE);
+ALOGI("%s Got preview dimension as %d x %d ", __func__, previewWidth, previewHeight);
+ if(!recordSize) {
+ //If application didn't set this parameter string, use the values from
+ //getPreviewSize() as video dimensions.
+ ALOGV("No Record Size requested, use the preview dimensions");
+ videoWidth = previewWidth;
+ videoHeight = previewHeight;
+ } else {
+ //Extract the record width and height that application requested.
+ if(!parse_size(recordSize, videoWidth, videoHeight)) {
+ //VFE output1 shouldn't be greater than VFE output2.
+ if( (previewWidth > videoWidth) || (previewHeight > videoHeight)) {
+ //Set preview sizes as record sizes.
+ ALOGI("Preview size %dx%d is greater than record size %dx%d,\
+ resetting preview size to record size",previewWidth,\
+ previewHeight, videoWidth, videoHeight);
+ previewWidth = videoWidth;
+ previewHeight = videoHeight;
+ mParameters.setPreviewSize(previewWidth, previewHeight);
+ }
+ if( (mCurrentTarget != TARGET_MSM7630)
+ && (mCurrentTarget != TARGET_QSD8250)
+ && (mCurrentTarget != TARGET_MSM8660) ) {
+ //For Single VFE output targets, use record dimensions as preview dimensions.
+ previewWidth = videoWidth;
+ previewHeight = videoHeight;
+ mParameters.setPreviewSize(previewWidth, previewHeight);
+ }
+ } else {
+ ALOGE("initPreview X: failed to parse parameter record-size (%s)", recordSize);
+ return false;
+ }
+ }
+
+ mDimension.display_width = previewWidth;
+ mDimension.display_height= previewHeight;
+ mDimension.ui_thumbnail_width =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].width;
+ mDimension.ui_thumbnail_height =
+ thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+
+ ALOGV("initPreview E: preview size=%dx%d videosize = %d x %d", previewWidth, previewHeight, videoWidth, videoHeight );
+
+ if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+ mDimension.video_width = CEILING16(videoWidth);
+ /* Backup the video dimensions, as video dimensions in mDimension
+ * will be modified when DIS is supported. Need the actual values
+ * to pass as part of VPE config
+ */
+ videoWidth = mDimension.video_width;
+ mDimension.video_height = videoHeight;
+ ALOGV("initPreview : preview size=%dx%d videosize = %d x %d", previewWidth, previewHeight,
+ mDimension.video_width, mDimension.video_height);
+ }
+
+ // See comments in deinitPreview() for why we have to wait for the frame
+ // thread here, and why we can't use pthread_join().
+ mFrameThreadWaitLock.lock();
+ while (mFrameThreadRunning) {
+ ALOGI("initPreview: waiting for old frame thread to complete.");
+ mFrameThreadWait.wait(mFrameThreadWaitLock);
+ ALOGI("initPreview: old frame thread completed.");
+ }
+ mFrameThreadWaitLock.unlock();
+
+ mInSnapshotModeWaitLock.lock();
+ while (mInSnapshotMode) {
+ ALOGI("initPreview: waiting for snapshot mode to complete.");
+ mInSnapshotModeWait.wait(mInSnapshotModeWaitLock);
+ ALOGI("initPreview: snapshot mode completed.");
+ }
+ mInSnapshotModeWaitLock.unlock();
+
+ pmem_region = "/dev/pmem_adsp";
+ ion_heap = ION_CAMERA_HEAP_ID;
+
+ int cnt = 0;
+
+ memset(&myv12_params, 0, sizeof(yv12_format_parms_t));
+ mPreviewFrameSize = previewWidth * previewHeight * 3/2;
+ ALOGI("Width = %d Height = %d \n", previewWidth, previewHeight);
+ if(mPreviewFormat == CAMERA_YUV_420_YV12) {
+ myv12_params.CbOffset = PAD_TO_WORD(previewWidth * previewHeight);
+ myv12_params.CrOffset = myv12_params.CbOffset + PAD_TO_WORD((previewWidth * previewHeight)/4);
+ mDimension.prev_format = CAMERA_YUV_420_YV12;
+ ALOGI("CbOffset = 0x%x CrOffset = 0x%x \n",myv12_params.CbOffset, myv12_params.CrOffset);
+ } else {
+ CbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+ }
+
+ //Pass the yuv formats, display dimensions,
+ //so that vfe will be initialized accordingly.
+ mDimension.display_luma_width = previewWidth;
+ mDimension.display_luma_height = previewHeight;
+ mDimension.display_chroma_width = previewWidth;
+ mDimension.display_chroma_height = previewHeight;
+ if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO) {
+ mPreviewFrameSize = PAD_TO_4K(CEILING32(previewWidth) * CEILING32(previewHeight)) +
+ 2 * (CEILING32(previewWidth/2) * CEILING32(previewHeight/2));
+ CbCrOffset = PAD_TO_4K(CEILING32(previewWidth) * CEILING32(previewHeight));
+ mDimension.prev_format = CAMERA_YUV_420_NV21_ADRENO;
+ mDimension.display_luma_width = CEILING32(previewWidth);
+ mDimension.display_luma_height = CEILING32(previewHeight);
+ mDimension.display_chroma_width = 2 * CEILING32(previewWidth/2);
+ //Chroma Height is not needed as of now. Just sending with other dimensions.
+ mDimension.display_chroma_height = CEILING32(previewHeight/2);
+ }
+ ALOGV("mDimension.prev_format = %d", mDimension.prev_format);
+ ALOGV("mDimension.display_luma_width = %d", mDimension.display_luma_width);
+ ALOGV("mDimension.display_luma_height = %d", mDimension.display_luma_height);
+ ALOGV("mDimension.display_chroma_width = %d", mDimension.display_chroma_width);
+ ALOGV("mDimension.display_chroma_height = %d", mDimension.display_chroma_height);
+
+ dstOffset = 0;
+ //set DIS value to get the updated video width and height to calculate
+ //the required record buffer size
+ if(mVpeEnabled) {
+ bool status = setDIS();
+ if(status) {
+ ALOGE("Failed to set DIS");
+ return false;
+ }
+ }
+
+ //Pass the original video width and height and get the required width
+ //and height for record buffer allocation
+ mDimension.orig_video_width = videoWidth;
+ mDimension.orig_video_height = videoHeight;
+ if(mZslEnable){
+ //Limitation of ZSL where the thumbnail and display dimensions should be the same
+ mDimension.ui_thumbnail_width = mDimension.display_width;
+ mDimension.ui_thumbnail_height = mDimension.display_height;
+ mParameters.getPictureSize(&mPictureWidth, &mPictureHeight);
+ if (updatePictureDimension(mParameters, mPictureWidth,
+ mPictureHeight)) {
+ mDimension.picture_width = mPictureWidth;
+ mDimension.picture_height = mPictureHeight;
+ }
+ }
+ // mDimension will be filled with thumbnail_width, thumbnail_height,
+ // orig_picture_dx, and orig_picture_dy after this function call. We need to
+ // keep it for jpeg_encoder_encode.
+ bool ret = native_set_parms(CAMERA_PARM_DIMENSION,
+ sizeof(cam_ctrl_dimension_t), &mDimension);
+#if 0
+ if(mIs3DModeOn != true) {
+ if(mInHFRThread == false)
+ {
+ mPrevHeapDeallocRunning = false;
+#ifdef USE_ION
+ mPreviewHeap = new IonPool(ion_heap,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_PREVIEW, //MSM_PMEM_OUTPUT2,
+ mPreviewFrameSize,
+ kPreviewBufferCountActual,
+ mPreviewFrameSize,
+ CbCrOffset,
+ 0,
+ "preview");
+#else
+ mPreviewHeap = new PmemPool(pmem_region,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_PREVIEW, //MSM_PMEM_OUTPUT2,
+ mPreviewFrameSize,
+ kPreviewBufferCountActual,
+ mPreviewFrameSize,
+ CbCrOffset,
+ 0,
+ "preview");
+#endif
+ if (!mPreviewHeap->initialized()) {
+ mPreviewHeap.clear();
+ ALOGE("initPreview X: could not initialize Camera preview heap.");
+ return false;
+ }
+ }
+ else
+ {
+ for (int cnt = 0; cnt < kPreviewBufferCountActual; ++cnt) {
+ bool status;
+ int active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+ status = register_buf(mPreviewFrameSize,
+ mPreviewFrameSize,
+ CbCrOffset,
+ 0,
+ mPreviewHeap->mHeap->getHeapID(),
+ mPreviewHeap->mAlignedBufferSize * cnt,
+ (uint8_t *)mPreviewHeap->mHeap->base() + mPreviewHeap->mAlignedBufferSize * cnt,
+ MSM_PMEM_PREVIEW,
+ active,
+ true);
+ if(status == false){
+ ALOGE("Registring Preview Buffers failed for HFR mode");
+ return false;
+ }
+ }
+ }
+ // if 7x27A , YV12 format is set as preview format , if width is not 32
+ // bit aligned , we need seperate buffer to hold YV12 data
+ yv12framesize = (previewWidth*previewHeight)
+ + 2* ( CEILING16(previewWidth/2) * (previewHeight/2)) ;
+ if(( mPreviewFormat == CAMERA_YUV_420_YV12 ) &&
+ ( mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627 ) &&
+ previewWidth%32 != 0 ){
+ ALOGE("initpreview : creating YV12 heap as previewwidth %d not 32 aligned", previewWidth);
+#ifdef USE_ION
+ mYV12Heap = new IonPool(ion_heap,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_PREVIEW,
+ yv12framesize,
+ NUM_YV12_FRAMES,
+ yv12framesize,
+ CbCrOffset,
+ 0,
+ "postview");
+#else
+ mYV12Heap = new PmemPool(pmem_region,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_PREVIEW,
+ yv12framesize,
+ NUM_YV12_FRAMES,
+ yv12framesize,
+ CbCrOffset,
+ 0,
+ "postview");
+#endif
+ if (!mYV12Heap->initialized()) {
+ mYV12Heap.clear();
+ ALOGE("initPreview X: could not initialize YV12 Camera preview heap.");
+ return false;
+ }
+ }
+ }
+#endif
+
+ if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+
+ // Allocate video buffers after allocating preview buffers.
+ bool status = initRecord();
+ if(status != true) {
+ ALOGE("Failed to allocate video bufers");
+ return false;
+ }
+ }
+
+ if (ret) {
+ if(mIs3DModeOn != true) {
+ for (cnt = 0; cnt < kPreviewBufferCount; cnt++) {
+#if 0
+ frames[cnt].fd = mPreviewHeap->mHeap->getHeapID();
+ frames[cnt].buffer =
+ (uint32_t)mPreviewHeap->mHeap->base() + mPreviewHeap->mAlignedBufferSize * cnt;
+ frames[cnt].y_off = 0;
+ frames[cnt].cbcr_off = CbCrOffset;
+ frames[cnt].path = OUTPUT_TYPE_P; // MSM_FRAME_ENC;
+#endif
+ }
+
+ mPreviewBusyQueue.init();
+ LINK_camframe_release_all_frames(CAM_PREVIEW_FRAME);
+ for(int i=ACTIVE_PREVIEW_BUFFERS ;i <kPreviewBufferCount; i++)
+ LINK_camframe_add_frame(CAM_PREVIEW_FRAME,&frames[i]);
+
+ mPreviewThreadWaitLock.lock();
+ pthread_attr_t pattr;
+ pthread_attr_init(&pattr);
+ pthread_attr_setdetachstate(&pattr, PTHREAD_CREATE_DETACHED);
+
+ mPreviewThreadRunning = !pthread_create(&mPreviewThread,
+ &pattr,
+ preview_thread,
+ (void*)NULL);
+ ret = mPreviewThreadRunning;
+ mPreviewThreadWaitLock.unlock();
+
+ if(ret == false)
+ return ret;
+ }
+
+
+ mFrameThreadWaitLock.lock();
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+ camframeParams.cammode = CAMERA_MODE_2D;
+
+ if (mIs3DModeOn) {
+ camframeParams.cammode = CAMERA_MODE_3D;
+ } else {
+ camframeParams.cammode = CAMERA_MODE_2D;
+ }
+ LINK_cam_frame_set_exit_flag(0);
+
+ mFrameThreadRunning = !pthread_create(&mFrameThread,
+ &attr,
+ frame_thread,
+ &camframeParams);
+ ret = mFrameThreadRunning;
+ mFrameThreadWaitLock.unlock();
+ LINK_wait_cam_frame_thread_ready();
+ }
+ mFirstFrame = true;
+
+ ALOGV("initPreview X: %d", ret);
+ return ret;
+}
+
+// Stop the preview/frame pipeline without blocking.  Counterpart of
+// initPreview(); safe to call from the frame thread's own callback.
+void QualcommCameraHardware::deinitPreview(void)
+{
+ ALOGV("deinitPreview E");
+
+ mPreviewBusyQueue.deinit();
+
+ // When we call deinitPreview(), we signal to the frame thread that it
+ // needs to exit, but we DO NOT WAIT for it to complete here. The problem
+ // is that deinitPreview is sometimes called from the frame-thread's
+ // callback, when the refcount on the Camera client reaches zero. If we
+ // called pthread_join(), we would deadlock. So, we just call
+ // LINK_camframe_terminate() in deinitPreview(), which makes sure that
+ // after the preview callback returns, the camframe thread will exit. We
+ // could call pthread_join() in initPreview() to join the last frame
+ // thread. However, we would also have to call pthread_join() in release
+ // as well, shortly before we destroy the object; this would cause the same
+ // deadlock, since release(), like deinitPreview(), may also be called from
+ // the frame-thread's callback. Thus we have to make the frame thread
+ // detached, and use a separate mechanism (mFrameThreadWait, see
+ // initPreview()) to wait for it to complete.
+
+ LINK_camframe_terminate();
+ ALOGV("deinitPreview X");
+}
+
+// Prepare for a RAW (non-JPEG) snapshot: push the current dimension table
+// to the driver, allocate the raw capture memory, and fill in
+// mRawCaptureParms.  Returns false on any failure.
+bool QualcommCameraHardware::initRawSnapshot()
+{
+ ALOGV("initRawSnapshot E");
+
+ // mDimension.raw_picture_{width,height} are only meaningful after the
+ // dimension table has been accepted by the driver.
+ bool ret = native_set_parms(CAMERA_PARM_DIMENSION,
+ sizeof(cam_ctrl_dimension_t), &mDimension);
+ if(!ret){
+ ALOGE("initRawSnapshot X: failed to set dimension");
+ return false;
+ }
+ int rawSnapshotSize = mDimension.raw_picture_height *
+ mDimension.raw_picture_width;
+
+ ALOGI("raw_snapshot_buffer_size = %d, raw_picture_height = %d, "\
+ "raw_picture_width = %d",
+ rawSnapshotSize, mDimension.raw_picture_height,
+ mDimension.raw_picture_width);
+
+ // Create Memory for Raw Snapshot
+ if( createSnapshotMemory(numCapture, numCapture, false, PICTURE_FORMAT_RAW) == false ) // TODO : check if the numbers are correct
+ {
+ ALOGE("ERROR : initRawSnapshot , createSnapshotMemory failed");
+ return false;
+ }
+
+ // Single-shot RAW capture only; multiple raw captures are rejected in
+ // createSnapshotMemory().
+ mRawCaptureParms.num_captures = 1;
+ mRawCaptureParms.raw_picture_width = mDimension.raw_picture_width;
+ mRawCaptureParms.raw_picture_height = mDimension.raw_picture_height;
+
+ ALOGV("initRawSnapshot X");
+ return true;
+}
+// Allocate the buffer sets needed for ZSL (zero shutter lag) capture:
+// computes postview, raw-snapshot and JPEG buffer geometries, then creates
+// the snapshot memory.  Returns false on allocation failure.
+bool QualcommCameraHardware::initZslBuffers(bool initJpegHeap){
+ ALOGV("Init ZSL buffers E");
+ const char * pmem_region;
+ int ion_heap = ION_CP_MM_HEAP_ID;
+ int postViewBufferSize;
+
+ mPostviewWidth = mDimension.display_width;
+ mPostviewHeight = mDimension.display_height;
+
+ //postview buffer initialization
+ postViewBufferSize = mPostviewWidth * mPostviewHeight * 3 / 2;
+ int CbCrOffsetPostview = PAD_TO_WORD(mPostviewWidth * mPostviewHeight);
+ if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO) {
+ postViewBufferSize = PAD_TO_4K(CEILING32(mPostviewWidth) * CEILING32(mPostviewHeight)) +
+ 2 * (CEILING32(mPostviewWidth/2) * CEILING32(mPostviewHeight/2));
+ // BUGFIX: this used to declare a new local ("int CbCrOffsetPostview = ..."),
+ // shadowing the variable above, so the 4K-padded ADRENO offset was
+ // silently thrown away. Assign to the outer variable instead.
+ CbCrOffsetPostview = PAD_TO_4K(CEILING32(mPostviewWidth) * CEILING32(mPostviewHeight));
+ }
+
+ //Snapshot buffer initialization
+ mRawSize = mPictureWidth * mPictureHeight * 3 / 2;
+ mCbCrOffsetRaw = PAD_TO_WORD(mPictureWidth * mPictureHeight);
+ if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO) {
+ mRawSize = PAD_TO_4K(CEILING32(mPictureWidth) * CEILING32(mPictureHeight)) +
+ 2 * (CEILING32(mPictureWidth/2) * CEILING32(mPictureHeight/2));
+ mCbCrOffsetRaw = PAD_TO_4K(CEILING32(mPictureWidth) * CEILING32(mPictureHeight));
+ }
+
+ //Jpeg buffer initialization
+ // 7x27-family targets require 16-pixel-aligned JPEG dimensions.
+ if( mCurrentTarget == TARGET_MSM7627 ||
+ (mCurrentTarget == TARGET_MSM7625A ||
+ mCurrentTarget == TARGET_MSM7627A))
+ mJpegMaxSize = CEILING16(mPictureWidth) * CEILING16(mPictureHeight) * 3 / 2;
+ else {
+ mJpegMaxSize = mPictureWidth * mPictureHeight * 3 / 2;
+ if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO){
+ mJpegMaxSize =
+ PAD_TO_4K(CEILING32(mPictureWidth) * CEILING32(mPictureHeight)) +
+ 2 * (CEILING32(mPictureWidth/2) * CEILING32(mPictureHeight/2));
+ }
+ }
+
+ // For non-ADRENO formats let the camera config layer override the
+ // computed sizes/offsets with the driver's own buffer geometry.
+ cam_buf_info_t buf_info;
+ int yOffset = 0;
+ buf_info.resolution.width = mPictureWidth;
+ buf_info.resolution.height = mPictureHeight;
+ if(mPreviewFormat != CAMERA_YUV_420_NV21_ADRENO) {
+ mCfgControl.mm_camera_get_parm(CAMERA_PARM_BUFFER_INFO, (void *)&buf_info);
+ mRawSize = buf_info.size;
+ mJpegMaxSize = mRawSize;
+ mCbCrOffsetRaw = buf_info.cbcr_offset;
+ yOffset = buf_info.yoffset;
+ }
+
+ ALOGV("initZslBuffer: initializing mRawHeap.");
+ // 8660 uses SMI pmem; everything else uses ADSP pmem.
+ if(mCurrentTarget == TARGET_MSM8660) {
+ pmem_region = "/dev/pmem_smipool";
+ } else {
+ pmem_region = "/dev/pmem_adsp";
+ }
+ //Main Raw Image
+ // NOTE(review): the legacy PmemPool/IonPool heap path below is disabled;
+ // allocation now goes through createSnapshotMemory().
+ #if 0
+#ifdef USE_ION
+ mRawHeap =
+ new IonPool( ion_heap,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_MAINIMG,
+ mJpegMaxSize,
+ MAX_SNAPSHOT_BUFFERS,
+ mRawSize,
+ mCbCrOffsetRaw,
+ yOffset,
+ "snapshot camera");
+#else
+ mRawHeap =
+ new PmemPool(pmem_region,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_MAINIMG,
+ mJpegMaxSize,
+ MAX_SNAPSHOT_BUFFERS,
+ mRawSize,
+ mCbCrOffsetRaw,
+ yOffset,
+ "snapshot camera");
+#endif
+ if (!mRawHeap->initialized()) {
+ ALOGE("initZslBuffer X failed ");
+ mRawHeap.clear();
+ ALOGE("initRaw X: error initializing mRawHeap");
+ return false;
+ }
+
+
+ // Jpeg
+ if (initJpegHeap) {
+ ALOGV("initZslRaw: initializing mJpegHeap.");
+ mJpegHeap =
+ new AshmemPool(mJpegMaxSize,
+ (MAX_SNAPSHOT_BUFFERS - 2), // It is the max number of snapshot supported.
+ 0, // we do not know how big the picture will be
+ "jpeg");
+
+ if (!mJpegHeap->initialized()) {
+ mJpegHeap.clear();
+ mRawHeap.clear();
+ ALOGE("initZslRaw X failed: error initializing mJpegHeap.");
+ return false;
+ }
+ }
+
+ //PostView
+ pmem_region = "/dev/pmem_adsp";
+ ion_heap = ION_HEAP_ADSP_ID;
+#ifdef USE_ION
+ mPostviewHeap =
+ new IonPool(ion_heap,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_THUMBNAIL,
+ postViewBufferSize,
+ MAX_SNAPSHOT_BUFFERS,
+ postViewBufferSize,
+ CbCrOffsetPostview,
+ 0,
+ "thumbnail");
+#else
+ mPostviewHeap =
+ new PmemPool(pmem_region,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_THUMBNAIL,
+ postViewBufferSize,
+ MAX_SNAPSHOT_BUFFERS,
+ postViewBufferSize,
+ CbCrOffsetPostview,
+ 0,
+ "thumbnail");
+#endif
+
+ if (!mPostviewHeap->initialized()) {
+ mPostviewHeap.clear();
+ mJpegHeap.clear();
+ mRawHeap.clear();
+ ALOGE("initZslBuffer X failed: error initializing mPostviewHeap.");
+ return false;
+ }
+#endif
+ if( createSnapshotMemory(MAX_SNAPSHOT_BUFFERS, MAX_SNAPSHOT_BUFFERS, initJpegHeap) == false ) // TODO : check if the numbers are correct
+ {
+ ALOGE("ERROR : initZslraw , createSnapshotMemory failed");
+ return false;
+ }
+ /* frame all the exif and encode information into encode_params_t */
+ initImageEncodeParameters(MAX_SNAPSHOT_BUFFERS);
+
+ ALOGV("initZslRaw X");
+ return true;
+}
+
+// Release all ZSL capture buffers: unregister each raw buffer with the
+// kernel, release its camera_memory mapping, close its fd (and free its ION
+// allocation), then release the JPEG mappings.  Always returns true.
+bool QualcommCameraHardware::deinitZslBuffers()
+{
+ ALOGV("deinitZslBuffers E");
+ // In ZSL mode the full MAX_SNAPSHOT_BUFFERS set was allocated; otherwise
+ // only numCapture buffers exist.
+ for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+ if(NULL != mRawMapped[cnt]) {
+ ALOGI("Unregister MAIN_IMG");
+ // Unregister with the kernel BEFORE releasing/unmapping — the
+ // trailing (0, 0) arguments mark the buffer inactive/unregistered.
+ register_buf(mJpegMaxSize,
+ mRawSize,mCbCrOffsetRaw,0,
+ mRawfd[cnt],0,
+ (uint8_t *)mRawMapped[cnt]->data,
+ MSM_PMEM_MAINIMG,
+ 0, 0);
+ mRawMapped[cnt]->release(mRawMapped[cnt]);
+ mRawMapped[cnt] = NULL;
+ close(mRawfd[cnt]);
+#ifdef USE_ION
+ deallocate_ion_memory(&raw_main_ion_fd[cnt], &raw_ion_info_fd[cnt]);
+#endif
+ }
+ }
+ // JPEG buffers are plain camera_memory allocations (fd == -1 at creation);
+ // no kernel unregistration is needed, only a release.
+ for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS) : numCapture); cnt++) {
+ if(mJpegMapped[cnt]) {
+ mJpegMapped[cnt]->release(mJpegMapped[cnt]);
+ mJpegMapped[cnt] = NULL;
+ }
+ }
+ ALOGV("deinitZslBuffers X");
+ return true;
+}
+
+// Allocate and register the memory needed for a snapshot.
+// For PICTURE_FORMAT_JPEG: raw YUV buffers (+ optional JPEG output buffers)
+// and the thumbnail/postview buffers from the preview window.
+// For RAW format: a single raw snapshot buffer (multi-capture unsupported).
+// Returns false on any failure.
+bool QualcommCameraHardware::createSnapshotMemory (int numberOfRawBuffers, int numberOfJpegBuffers,
+ bool initJpegHeap, int snapshotFormat)
+{
+ const char * pmem_region;  // string literals: must not be mutable char*
+ int ret;
+ int ion_heap = ION_CP_MM_HEAP_ID;
+ // 8660 uses SMI pmem; everything else uses ADSP pmem.
+ if(mCurrentTarget == TARGET_MSM8660) {
+ pmem_region = "/dev/pmem_smipool";
+ } else {
+ pmem_region = "/dev/pmem_adsp";
+ }
+ if( snapshotFormat == PICTURE_FORMAT_JPEG) {
+ // Create Raw memory for snapshot
+ for(int cnt = 0; cnt < numberOfRawBuffers; cnt++)
+ {
+ #ifdef USE_ION
+ if (allocate_ion_memory(&raw_main_ion_fd[cnt], &raw_alloc[cnt], &raw_ion_info_fd[cnt],
+ ion_heap, mJpegMaxSize, &mRawfd[cnt]) < 0){
+ ALOGE("do_mmap: Open device %s failed!\n",pmem_region);
+ // BUGFIX: was "return NULL;" from a bool function.
+ return false;
+ }
+ #else
+ mRawfd[cnt] = open(pmem_region, O_RDWR|O_SYNC);
+ if (mRawfd[cnt] <= 0) {
+ ALOGE("%s: Open device %s failed!\n",__func__, pmem_region);
+ return false;
+ }
+ #endif
+ ALOGI("%s Raw memory index: %d , fd is %d ", __func__, cnt, mRawfd[cnt]);
+ mRawMapped[cnt]=mGetMemory(mRawfd[cnt], mJpegMaxSize,1,mCallbackCookie);
+ if(mRawMapped[cnt] == NULL) {
+ ALOGE("Failed to get camera memory for mRawMapped heap index: %d", cnt);
+ return false;
+ }else{
+ ALOGI("Received following info for raw mapped data:%p,handle:%p, size:%d,release:%p",
+ mRawMapped[cnt]->data ,mRawMapped[cnt]->handle, mRawMapped[cnt]->size, mRawMapped[cnt]->release);
+ }
+ // Register Raw frames
+ ALOGI("Registering buffer %d with fd :%d with kernel",cnt,mRawfd[cnt]);
+ int active = (cnt < ACTIVE_ZSL_BUFFERS); // TODO check ?
+ register_buf(mJpegMaxSize,
+ mRawSize,
+ mCbCrOffsetRaw,
+ mYOffset,
+ mRawfd[cnt],0,
+ (uint8_t *)mRawMapped[cnt]->data,
+ MSM_PMEM_MAINIMG,
+ active);
+ }
+ // Create Jpeg memory for snapshot
+ if (initJpegHeap)
+ {
+ for(int cnt = 0; cnt < numberOfJpegBuffers; cnt++)
+ {
+ ALOGI("%s Jpeg memory index: %d , fd is %d ", __func__, cnt, mJpegfd[cnt]);
+ // fd == -1: plain (non-pmem) camera_memory allocation.
+ mJpegMapped[cnt]=mGetMemory(-1, mJpegMaxSize,1,mCallbackCookie);
+ if(mJpegMapped[cnt] == NULL) {
+ ALOGE("Failed to get camera memory for mJpegMapped heap index: %d", cnt);
+ return false;
+ }else{
+ ALOGI("Received following info for jpeg mapped data:%p,handle:%p, size:%d,release:%p",
+ mJpegMapped[cnt]->data ,mJpegMapped[cnt]->handle, mJpegMapped[cnt]->size, mJpegMapped[cnt]->release);
+ }
+ }
+ }
+ // Lock Thumbnail buffers, and register them
+ ALOGI("Locking and registering Thumbnail buffer(s)");
+ for(int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+ // TODO : change , lock all thumbnail buffers
+ if((mPreviewWindow != NULL) && (mThumbnailBuffer[cnt] != NULL)) {
+ ALOGI("createsnapshotbuffers : display lock");
+ mDisplayLock.lock();
+ /* Lock the postview buffer before use */
+ ALOGI(" Locking thumbnail/postview buffer %d", cnt);
+ if( (ret = mPreviewWindow->lock_buffer(mPreviewWindow,
+ mThumbnailBuffer[cnt])) != NO_ERROR) {
+ ALOGE(" Error locking postview buffer. Error = %d ", ret);
+ ALOGE("createsnapshotbuffers : display unlock error");
+ mDisplayLock.unlock();
+ return false;
+ }
+ if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t*)(*mThumbnailBuffer[cnt]),
+ GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
+ ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
+ mDisplayLock.unlock();
+ // BUGFIX: was "return -EINVAL;" — a non-zero int converts to
+ // true, so a genlock failure was reported as SUCCESS.
+ return false;
+ } else {
+ mThumbnailLockState[cnt] = BUFFER_LOCKED;
+ }
+ mDisplayLock.unlock();
+ ALOGE("createsnapshotbuffers : display unlock");
+ }
+
+ private_handle_t *thumbnailHandle;
+ // Thumbnail buffers are registered with preview geometry.
+ int mBufferSize = previewWidth * previewHeight * 3/2;
+ int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+
+ if(mThumbnailBuffer[cnt]) {
+ thumbnailHandle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+ ALOGI("fd thumbnailhandle fd %d size %d", thumbnailHandle->fd, thumbnailHandle->size);
+ mThumbnailMapped [cnt]= (unsigned int) mmap(0, thumbnailHandle->size, PROT_READ|PROT_WRITE,
+ MAP_SHARED, thumbnailHandle->fd, 0);
+ if((void *)mThumbnailMapped[cnt] == MAP_FAILED){
+ ALOGE(" Couldnt map Thumbnail buffer %d", errno);
+ return false;
+ }
+ register_buf(mBufferSize,
+ mBufferSize, mCbCrOffset, 0,
+ thumbnailHandle->fd,
+ 0,
+ (uint8_t *)mThumbnailMapped[cnt],
+ MSM_PMEM_THUMBNAIL,
+ (cnt < ACTIVE_ZSL_BUFFERS));
+ }
+ } // for loop locking and registering thumbnail buffers
+ } else { // End if Format is Jpeg , start if format is RAW
+ if(numberOfRawBuffers ==1) {
+ int rawSnapshotSize = mDimension.raw_picture_height * mDimension.raw_picture_width;
+#ifdef USE_ION
+ if (allocate_ion_memory(&raw_snapshot_main_ion_fd, &raw_snapshot_alloc, &raw_snapshot_ion_info_fd,
+ ion_heap, rawSnapshotSize, &mRawSnapshotfd) < 0){
+ ALOGE("do_mmap: Open device %s failed!\n",pmem_region);
+ return false;
+ }
+#else
+ mRawSnapshotfd = open(pmem_region, O_RDWR|O_SYNC);
+ if (mRawSnapshotfd <= 0) {
+ ALOGE("%s: Open device %s failed for rawnspashot!\n",__func__, pmem_region);
+ return false;
+ }
+#endif
+ ALOGI("%s Raw snapshot memory , fd is %d ", __func__, mRawSnapshotfd);
+ mRawSnapshotMapped=mGetMemory(mRawSnapshotfd,
+ rawSnapshotSize,
+ 1,
+ mCallbackCookie);
+ if(mRawSnapshotMapped == NULL) {
+ ALOGE("Failed to get camera memory for mRawSnapshotMapped ");
+ return false;
+ }else{
+ ALOGI("Received following info for raw mapped data:%p,handle:%p, size:%d,release:%p",
+ mRawSnapshotMapped->data ,mRawSnapshotMapped->handle, mRawSnapshotMapped->size, mRawSnapshotMapped->release);
+ }
+ // Register Raw frames
+ ALOGI("Registering RawSnapshot buffer with fd :%d with kernel",mRawSnapshotfd);
+ int active = 1; // TODO check ?
+ register_buf( rawSnapshotSize,
+ rawSnapshotSize,
+ 0,
+ 0,
+ mRawSnapshotfd,
+ 0,
+ (uint8_t *)mRawSnapshotMapped->data,
+ MSM_PMEM_RAW_MAINIMG,
+ active);
+ } else {
+ ALOGE("Multiple raw snapshot capture not supported for now....");
+ return false;
+ }
+ } // end else , if RAW format
+ return true;
+}
+// Prepare JPEG snapshot capture: compute picture/thumbnail/postview
+// geometry, push dimensions to the driver, set JPEG rotation, allocate the
+// snapshot memory and fill mImageCaptureParms.  Returns false on failure.
+bool QualcommCameraHardware::initRaw(bool initJpegHeap)
+{
+ int postViewBufferSize;
+ uint32_t pictureAspectRatio;
+ uint32_t i;
+ mParameters.getPictureSize(&mPictureWidth, &mPictureHeight);
+ mActualPictWidth = mPictureWidth;
+ mActualPictHeight = mPictureHeight;
+ if (updatePictureDimension(mParameters, mPictureWidth, mPictureHeight)) {
+ mDimension.picture_width = mPictureWidth;
+ mDimension.picture_height = mPictureHeight;
+ }
+ ALOGV("initRaw E: picture size=%dx%d", mPictureWidth, mPictureHeight);
+ // Side-by-side 3D snapshots are twice as wide.
+ int w_scale_factor = (mIs3DModeOn && mSnapshot3DFormat == SIDE_BY_SIDE_FULL) ? 2 : 1;
+
+ /* use the default thumbnail sizes */
+ mThumbnailHeight = thumbnail_sizes[DEFAULT_THUMBNAIL_SETTING].height;
+ mThumbnailWidth = (mThumbnailHeight * mPictureWidth)/ mPictureHeight;
+ /* see if we can get better thumbnail sizes (not mandatory?) */
+ pictureAspectRatio = (uint32_t)((mPictureWidth * Q12) / mPictureHeight);
+ for(i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ){
+ if(thumbnail_sizes[i].aspect_ratio == pictureAspectRatio)
+ {
+ mThumbnailWidth = thumbnail_sizes[i].width;
+ mThumbnailHeight = thumbnail_sizes[i].height;
+ break;
+ }
+ }
+ /* calculate thumbnail aspect ratio */
+ if(mCurrentTarget == TARGET_MSM7627 ) {
+ int thumbnail_aspect_ratio =
+ (uint32_t)((mThumbnailWidth * Q12) / mThumbnailHeight);
+
+ if (thumbnail_aspect_ratio < pictureAspectRatio) {
+
+ /* if thumbnail is narrower than main image, in other words wide mode
+ * snapshot then we want to adjust the height of the thumbnail to match
+ * the main image aspect ratio. */
+ mThumbnailHeight =
+ (mThumbnailWidth * Q12) / pictureAspectRatio;
+ } else if (thumbnail_aspect_ratio != pictureAspectRatio) {
+
+ /* if thumbnail is wider than main image we want to adjust width of the
+ * thumbnail to match main image aspect ratio */
+ mThumbnailWidth =
+ (mThumbnailHeight * pictureAspectRatio) / Q12;
+ }
+ /* make the dimensions multiple of 16 - JPEG requirement */
+ mThumbnailWidth = FLOOR16(mThumbnailWidth);
+ mThumbnailHeight = FLOOR16(mThumbnailHeight);
+ ALOGV("the thumbnail sizes are %dx%d",mThumbnailWidth,mThumbnailHeight);
+ }
+
+ /* calculate postView size */
+ mPostviewWidth = mThumbnailWidth;
+ mPostviewHeight = mThumbnailHeight;
+ /* Try to keep the postview dimensions near to preview for better
+ * performance and userexperience. If the postview and preview dimensions
+ * are same, then we can try to use the same overlay of preview for
+ * postview also. If not, we need to reset the overlay for postview.
+ * we will be getting the same dimensions for preview and postview
+ * in most of the cases. The only exception is for applications
+ * which won't use optimalPreviewSize based on picture size.
+ */
+ if((mPictureHeight >= previewHeight) &&
+ (mCurrentTarget != TARGET_MSM7627) && !mIs3DModeOn) {
+ mPostviewHeight = previewHeight;
+ mPostviewWidth = (previewHeight * mPictureWidth) / mPictureHeight;
+ }else if(mActualPictHeight < mThumbnailHeight){
+ mPostviewHeight = THUMBNAIL_SMALL_HEIGHT;
+ mPostviewWidth = (THUMBNAIL_SMALL_HEIGHT * mActualPictWidth)/ mActualPictHeight;
+ mThumbnailWidth = mPostviewWidth;
+ mThumbnailHeight = mPostviewHeight;
+ }
+
+ if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO){
+ mDimension.main_img_format = CAMERA_YUV_420_NV21_ADRENO;
+ mDimension.thumb_format = CAMERA_YUV_420_NV21_ADRENO;
+ }
+
+ mDimension.ui_thumbnail_width = mPostviewWidth;
+ mDimension.ui_thumbnail_height = mPostviewHeight;
+
+ // mDimension will be filled with thumbnail_width, thumbnail_height,
+ // orig_picture_dx, and orig_picture_dy after this function call. We need to
+ // keep it for jpeg_encoder_encode.
+ bool ret = native_set_parms(CAMERA_PARM_DIMENSION,
+ sizeof(cam_ctrl_dimension_t), &mDimension);
+
+ if(!ret) {
+ ALOGE("initRaw X: failed to set dimension");
+ return false;
+ }
+#if 0
+ if (mJpegHeap != NULL) {
+ ALOGV("initRaw: clearing old mJpegHeap.");
+ mJpegHeap.clear();
+ }
+#endif
+ //postview buffer initialization
+ postViewBufferSize = mPostviewWidth * w_scale_factor * mPostviewHeight * 3 / 2;
+ int CbCrOffsetPostview = PAD_TO_WORD(mPostviewWidth * w_scale_factor * mPostviewHeight);
+
+ //Snapshot buffer initialization
+ mRawSize = mPictureWidth * w_scale_factor * mPictureHeight * 3 / 2;
+ mCbCrOffsetRaw = PAD_TO_WORD(mPictureWidth * w_scale_factor * mPictureHeight);
+ if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO) {
+ mRawSize = PAD_TO_4K(CEILING32(mPictureWidth * w_scale_factor) * CEILING32(mPictureHeight)) +
+ 2 * (CEILING32(mPictureWidth * w_scale_factor/2) * CEILING32(mPictureHeight/2));
+ mCbCrOffsetRaw = PAD_TO_4K(CEILING32(mPictureWidth * w_scale_factor) * CEILING32(mPictureHeight));
+ }
+
+ //Jpeg buffer initialization
+ // 7x27-family targets require 16-pixel-aligned JPEG dimensions.
+ if( mCurrentTarget == TARGET_MSM7627 ||
+ (mCurrentTarget == TARGET_MSM7625A ||
+ mCurrentTarget == TARGET_MSM7627A))
+ mJpegMaxSize = CEILING16(mPictureWidth * w_scale_factor) * CEILING16(mPictureHeight) * 3 / 2;
+ else {
+ mJpegMaxSize = mPictureWidth * w_scale_factor * mPictureHeight * 3 / 2;
+ if(mPreviewFormat == CAMERA_YUV_420_NV21_ADRENO){
+ mJpegMaxSize =
+ PAD_TO_4K(CEILING32(mPictureWidth * w_scale_factor) * CEILING32(mPictureHeight)) +
+ 2 * (CEILING32(mPictureWidth * w_scale_factor/2) * CEILING32(mPictureHeight/2));
+ }
+ }
+
+ int rotation = mParameters.getInt("rotation");
+ char mDeviceName[PROPERTY_VALUE_MAX];
+ property_get("ro.hw_plat", mDeviceName, "");
+ // 7x25A sensor is mounted 90 degrees off; compensate in JPEG rotation.
+ if(!strcmp(mDeviceName,"7x25A"))
+ rotation = (rotation + 90)%360;
+
+ if (mIs3DModeOn)
+ rotation = 0;
+ ret = native_set_parms(CAMERA_PARM_JPEG_ROTATION, sizeof(int), &rotation);
+ if(!ret){
+ // BUGFIX: message used to say "setting camera id failed", which does
+ // not match the parameter actually being set here.
+ ALOGE("setting jpeg rotation failed");
+ return false;
+ }
+ // Let the camera config layer override the computed raw geometry with the
+ // driver's own buffer info (2D mode only).
+ cam_buf_info_t buf_info;
+ if(mIs3DModeOn == false)
+ {
+ buf_info.resolution.width = mPictureWidth * w_scale_factor;
+ buf_info.resolution.height = mPictureHeight;
+ mCfgControl.mm_camera_get_parm(CAMERA_PARM_BUFFER_INFO, (void *)&buf_info);
+ mRawSize = buf_info.size;
+ mJpegMaxSize = mRawSize;
+ mCbCrOffsetRaw = buf_info.cbcr_offset;
+ mYOffset = buf_info.yoffset;
+ }
+ int mBufferSize;
+ int CbCrOffset;
+ if(mCurrentTarget != TARGET_MSM7627 && mCurrentTarget != TARGET_MSM7627A){
+ // NOTE: getPreviewSize refreshes the file-scope previewWidth/Height.
+ mParameters.getPreviewSize(&previewWidth, &previewHeight);
+ mBufferSize = previewWidth * previewHeight * 3/2;
+ CbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+ }
+ else {
+ mBufferSize = mPostviewWidth * mPostviewHeight * 3/2;
+ CbCrOffset = PAD_TO_WORD(mPostviewWidth * mPostviewHeight);
+ }
+
+ ALOGV("initRaw: initializing mRawHeap.");
+
+ // Create memory for Raw YUV frames and Jpeg images
+ if( createSnapshotMemory(numCapture, numCapture, initJpegHeap) == false )
+ {
+ ALOGE("ERROR : initraw , createSnapshotMemory failed");
+ return false;
+ }
+ /* frame all the exif and encode information into encode_params_t */
+
+ initImageEncodeParameters(numCapture);
+ /* fill main image size, thumbnail size, postview size into capture_params_t*/
+ memset(&mImageCaptureParms, 0, sizeof(capture_params_t));
+ mImageCaptureParms.num_captures = numCapture;
+ mImageCaptureParms.picture_width = mPictureWidth;
+ mImageCaptureParms.picture_height = mPictureHeight;
+ mImageCaptureParms.postview_width = mPostviewWidth;
+ mImageCaptureParms.postview_height = mPostviewHeight;
+
+ // Zero thumbnail dimensions tell the encoder to skip the thumbnail.
+ int width = mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+ int height = mParameters.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+ if((width != 0) && (height != 0)) {
+ mImageCaptureParms.thumbnail_width = mThumbnailWidth;
+ mImageCaptureParms.thumbnail_height = mThumbnailHeight;
+ } else {
+ mImageCaptureParms.thumbnail_width = 0;
+ mImageCaptureParms.thumbnail_height = 0;
+ }
+
+ ALOGI("%s: picture size=%dx%d",__FUNCTION__,
+ mImageCaptureParms.picture_width, mImageCaptureParms.picture_height);
+ ALOGI("%s: postview size=%dx%d",__FUNCTION__,
+ mImageCaptureParms.postview_width, mImageCaptureParms.postview_height);
+ ALOGI("%s: thumbnail size=%dx%d",__FUNCTION__,
+ mImageCaptureParms.thumbnail_width, mImageCaptureParms.thumbnail_height);
+
+ ALOGV("initRaw X");
+ return true;
+}
+
+
+// Tear down the single RAW snapshot buffer created by createSnapshotMemory():
+// unregister it with the kernel, release the mapping, close the fd and free
+// the ION allocation.  No-op if the buffer was never mapped.
+void QualcommCameraHardware::deinitRawSnapshot()
+{
+ ALOGV("deinitRawSnapshot E");
+
+ if(mRawSnapshotMapped) {
+ const int snapshotBytes =
+ mDimension.raw_picture_height * mDimension.raw_picture_width;
+ // Unregister with the kernel before releasing the mapping; the two
+ // trailing 'false' arguments mark the buffer inactive/unregistered.
+ register_buf( snapshotBytes,
+ snapshotBytes,
+ 0,
+ 0,
+ mRawSnapshotfd,
+ 0,
+ (uint8_t *)mRawSnapshotMapped->data,
+ MSM_PMEM_RAW_MAINIMG,
+ false,
+ false);
+ mRawSnapshotMapped->release(mRawSnapshotMapped);
+ mRawSnapshotMapped = NULL;
+ close(mRawSnapshotfd);
+#ifdef USE_ION
+ deallocate_ion_memory(&raw_snapshot_main_ion_fd, &raw_snapshot_ion_info_fd);
+#endif
+ }
+ ALOGV("deinitRawSnapshot X");
+}
+
+// Release all JPEG-snapshot resources: raw buffers, JPEG buffers, and the
+// thumbnail/postview buffers borrowed from the preview window (genlock
+// unlock, cancel back to the window, unregister, munmap).
+void QualcommCameraHardware::deinitRaw()
+{
+ ALOGV("deinitRaw E");
+ ALOGV("deinitRaw , clearing raw memory and jpeg memory");
+ for (int cnt = 0; cnt < (mZslEnable? MAX_SNAPSHOT_BUFFERS : numCapture); cnt++) {
+ if(NULL != mRawMapped[cnt]) {
+ ALOGI("Unregister MAIN_IMG");
+ // Unregister before releasing — trailing (0, 0) marks the buffer
+ // inactive/unregistered.
+ register_buf(mJpegMaxSize,
+ mRawSize,mCbCrOffsetRaw,0,
+ mRawfd[cnt],0,
+ (uint8_t *)mRawMapped[cnt]->data,
+ MSM_PMEM_MAINIMG,
+ 0, 0);
+ mRawMapped[cnt]->release(mRawMapped[cnt]);
+ mRawMapped[cnt] = NULL;
+ close(mRawfd[cnt]);
+#ifdef USE_ION
+ deallocate_ion_memory(&raw_main_ion_fd[cnt], &raw_ion_info_fd[cnt]);
+#endif
+ }
+ }
+ for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS) : numCapture); cnt++) {
+ if(NULL != mJpegMapped[cnt]) {
+ mJpegMapped[cnt]->release(mJpegMapped[cnt]);
+ mJpegMapped[cnt] = NULL;
+ }
+ }
+ if( mPreviewWindow != NULL ) {
+ ALOGI("deinitRaw , clearing/cancelling thumbnail buffers:");
+ private_handle_t *handle;
+ for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+ if(mPreviewWindow != NULL && mThumbnailBuffer[cnt] != NULL) {
+ handle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+ ALOGI("%s: Cancelling postview buffer %d ", __FUNCTION__, handle->fd);
+ ALOGI("deinitraw : display lock");
+ mDisplayLock.lock();
+ // Release the genlock write lock taken in createSnapshotMemory()
+ // before handing the buffer back to the window.
+ if (BUFFER_LOCKED == mThumbnailLockState[cnt]) {
+ if (GENLOCK_FAILURE == genlock_unlock_buffer(handle)) {
+ ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ } else {
+ mThumbnailLockState[cnt] = BUFFER_UNLOCKED;
+ }
+ }
+ status_t retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+ mThumbnailBuffer[cnt]);
+ if(retVal != NO_ERROR)
+ ALOGE("%s: cancelBuffer failed for postview buffer %d",
+ __FUNCTION__, handle->fd);
+ if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+ struct encoder_media_buffer_type * packet =
+ (struct encoder_media_buffer_type *)metadata_memory[cnt]->data;
+ native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+ metadata_memory[cnt]->release(metadata_memory[cnt]);
+ metadata_memory[cnt] = NULL;
+ }
+ // unregister , unmap and release as well
+
+ int mBufferSize = previewWidth * previewHeight * 3/2;
+ int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+ if(mThumbnailMapped[cnt]) {
+ ALOGI("%s: Unregistering Thumbnail Buffer %d ", __FUNCTION__, handle->fd);
+ register_buf(mBufferSize,
+ mBufferSize, mCbCrOffset, 0,
+ handle->fd,
+ 0,
+ (uint8_t *)mThumbnailMapped[cnt],
+ MSM_PMEM_THUMBNAIL,
+ false, false);
+ // BUGFIX: the failure log used to print the unrelated
+ // identifier "index" (libc's index() function) — print the
+ // buffer index instead.
+ if (munmap((void *)(mThumbnailMapped[cnt]),handle->size ) == -1) {
+ ALOGE("deinitraw : Error un-mmapping the thumbnail buffer %d", cnt);
+ }
+ mThumbnailBuffer[cnt] = NULL;
+ mThumbnailMapped[cnt] = NULL;
+ }
+ ALOGI("deinitraw : display unlock");
+ mDisplayLock.unlock();
+ }
+ }
+ }
+ ALOGV("deinitRaw X");
+}
+
+// Return all preview buffers to the preview window: genlock-unlock each
+// locked buffer, cancel it back to the window, and release its mapping and
+// any associated metadata packet.  No-op when there is no window.
+void QualcommCameraHardware::relinquishBuffers()
+{
+ status_t retVal;
+ ALOGV("%s: E ", __FUNCTION__);
+ mDisplayLock.lock();
+ if( mPreviewWindow != NULL) {
+ for(int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+ if (BUFFER_LOCKED == frame_buffer[cnt].lockState) {
+ ALOGI(" Cancelling preview buffers %d ",frames[cnt].fd);
+ if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t *)
+ (*(frame_buffer[cnt].buffer)))) {
+ ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ } else {
+ frame_buffer[cnt].lockState = BUFFER_UNLOCKED;
+ }
+ }
+ retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+ frame_buffer[cnt].buffer);
+ // BUGFIX: guard against a NULL slot (and clear it) so a partially
+ // initialized preview or a double relinquish cannot crash or
+ // double-release the mapping.
+ if(mPreviewMapped[cnt] != NULL) {
+ mPreviewMapped[cnt]->release(mPreviewMapped[cnt]);
+ mPreviewMapped[cnt] = NULL;
+ }
+ if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+ struct encoder_media_buffer_type * packet =
+ (struct encoder_media_buffer_type *)metadata_memory[cnt]->data;
+ native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+ metadata_memory[cnt]->release(metadata_memory[cnt]);
+ metadata_memory[cnt] = NULL;
+ }
+ ALOGI("release preview buffers");
+ if(retVal != NO_ERROR)
+ ALOGE("%s: cancelBuffer failed for preview buffer %d ",
+ __FUNCTION__, frames[cnt].fd);
+ }
+ } else {
+ ALOGV(" PreviewWindow is null, will not cancelBuffers ");
+ }
+ mDisplayLock.unlock();
+ ALOGV("%s: X ", __FUNCTION__);
+}
+status_t QualcommCameraHardware::set_PreviewWindow(void* param)
+{
+ ALOGV(": set_preview_window");
+ preview_stream_ops_t* window = (preview_stream_ops_t*)param;
+ return setPreviewWindow(window);
+}
+
/* Installs (or clears, when window == NULL) the preview output window.
 * The swap itself is done under mDisplayLock. If the camera was started
 * before a real window existed (dummy/initial preview), the initial
 * preview is stopped and the real one started from the new window's
 * buffers. Returns the status of getBuffersAndStartPreview(), or
 * NO_ERROR when no restart was needed. */
status_t QualcommCameraHardware::setPreviewWindow(preview_stream_ops_t* window)
{
    status_t retVal = NO_ERROR;
    ALOGV(" %s: E ", __FUNCTION__);
    if( window == NULL) {
        ALOGW(" Setting NULL preview window ");
        /* Current preview window will be invalidated.
         * Release all the buffers back */
        //@TODO: We may need to this to avoid leak
       /*if(mPreviewWindow!=NULL)
         relinquishBuffers();*/
    }
    ALOGI("Set preview window:: ");
    // Publish the new window under the display lock so concurrent
    // preview-buffer operations see a consistent pointer.
    mDisplayLock.lock();
    mPreviewWindow = window;
    mDisplayLock.unlock();

    if( (mPreviewWindow != NULL) && mCameraRunning) {
        /* Initial preview in progress. Stop it and start
         * the actual preview */
        stopInitialPreview();
        retVal = getBuffersAndStartPreview();
    }
    ALOGV(" %s : X ", __FUNCTION__ );
    return retVal;
}
+
/* Provisions preview (and thumbnail/postview) buffers from the gralloc
 * preview window, then starts the preview stream.
 * Sequence: wait out any old frame thread; query min-undequeued count and
 * size the buffer pool; set geometry/usage on the window; dequeue + genlock
 * + mmap + register each preview buffer with the kernel; dequeue the
 * thumbnail buffers; cancel the min-undequeued surplus back to the window;
 * seed the free frame queue; finally call startPreviewInternal().
 * Returns NO_ERROR on success, a window/genlock error code otherwise.
 * NOTE(review): several early-return error paths below leave previously
 * dequeued/locked buffers unreleased -- confirm callers recover via
 * stopPreview()/relinquishBuffers(). */
status_t QualcommCameraHardware::getBuffersAndStartPreview() {
    status_t retVal = NO_ERROR;
    int stride;
    bool all_chnls = false;
    ALOGI(" %s : E ", __FUNCTION__);
    // A still-running frame thread may be touching the old buffers;
    // wait until it signals completion before re-provisioning.
    mFrameThreadWaitLock.lock();
    while (mFrameThreadRunning) {
        ALOGV("%s: waiting for old frame thread to complete.", __FUNCTION__);
        mFrameThreadWait.wait(mFrameThreadWaitLock);
        ALOGV("%s: old frame thread completed.",__FUNCTION__);
    }
    mFrameThreadWaitLock.unlock();

    if( mPreviewWindow!= NULL) {
        ALOGV("%s: Calling native_window_set_buffer", __FUNCTION__);

        android_native_buffer_t *mPreviewBuffer;  // NOTE(review): declared but never used here
        int32_t previewFormat;
        const char *str = mParameters.getPreviewFormat();
        int numMinUndequeuedBufs = 0;

        int err = mPreviewWindow->get_min_undequeued_buffer_count(mPreviewWindow,
            &numMinUndequeuedBufs);

        if (err != 0) {
            ALOGW("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                    strerror(-err), -err);
            return err;
        }
        // Pool = our working set plus what the compositor keeps dequeued.
        mTotalPreviewBufferCount = kPreviewBufferCount + numMinUndequeuedBufs;

        previewFormat = attr_lookup(app_preview_formats,
                sizeof(app_preview_formats) / sizeof(str_map), str);
        if (previewFormat ==  NOT_FOUND) {
            previewFormat = HAL_PIXEL_FORMAT_YCrCb_420_SP;
        }

        // Extra buffers beyond the preview set are used for thumbnails/postview:
        // MAX_SNAPSHOT_BUFFERS-2 in ZSL mode, numCapture otherwise.
        retVal = mPreviewWindow->set_buffer_count(mPreviewWindow,
                    mTotalPreviewBufferCount +
                    (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture) ); //1);

        if(retVal != NO_ERROR) {
            // NOTE(review): log message prints kPreviewBufferCount + 1, not the
            // count actually requested above.
            ALOGE("%s: Error while setting buffer count to %d ", __FUNCTION__, kPreviewBufferCount + 1);
            return retVal;
        }
        mParameters.getPreviewSize(&previewWidth, &previewHeight);

        retVal = mPreviewWindow->set_buffers_geometry(mPreviewWindow,
            previewWidth, previewHeight, previewFormat);

        if(retVal != NO_ERROR) {
            ALOGE("%s: Error while setting buffer geometry ", __FUNCTION__);
            return retVal;
        }

#ifdef USE_ION
        // ION builds allocate preview buffers from the camera heap, uncached.
        mPreviewWindow->set_usage (mPreviewWindow,
            GRALLOC_USAGE_PRIVATE_CAMERA_HEAP |
            GRALLOC_USAGE_PRIVATE_UNCACHED);
#else
        mPreviewWindow->set_usage (mPreviewWindow,
            GRALLOC_USAGE_PRIVATE_ADSP_HEAP |
            GRALLOC_USAGE_PRIVATE_UNCACHED);
#endif
        int CbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
        int cnt = 0, active = 1;
        int mBufferSize = previewWidth * previewHeight * 3/2;  // YUV 4:2:0
        for (cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
            //const native_handle *nh = (native_handle *)malloc (sizeof(native_handle));
            buffer_handle_t *bhandle =NULL;// &nh; ;
            //buffer_handle_t *bh_handle=&handle;
            retVal = mPreviewWindow->dequeue_buffer(mPreviewWindow,
                                            &(bhandle),
                                            &(stride));

            if((retVal == NO_ERROR)) {
                /* Acquire lock on the buffer if it was successfully
                 * dequeued from gralloc */
                ALOGV(" Locking buffer %d ", cnt);
                retVal = mPreviewWindow->lock_buffer(mPreviewWindow,
                                            bhandle);
                // lock the buffer using genlock
                if (GENLOCK_NO_ERROR != genlock_lock_buffer((native_handle_t *)(*bhandle),
                                                      GENLOCK_WRITE_LOCK, GENLOCK_MAX_TIMEOUT)) {
                    ALOGE("%s: genlock_lock_buffer(WRITE) failed", __FUNCTION__);
                    return -EINVAL;
                }
                ALOGI(" Locked buffer %d successfully", cnt);
                //yyan todo use handle to find out mPreviewBuffer

            } else {
                ALOGE("%s: dequeueBuffer failed for preview buffer. Error = %d",
                      __FUNCTION__, retVal);
                return retVal;
            }
            if(retVal == NO_ERROR) {
                private_handle_t *handle = (private_handle_t *)(*bhandle);//(private_handle_t *)mPreviewBuffer->handle;
                ALOGI("Handle %p, Fd passed:%d, Base:%p, Size %p",
                handle,handle->fd,handle->base,handle->size);

                if(handle) {

                    //thumbnailHandle = (private_handle_t *)mThumbnailBuffer->handle;
                    ALOGV("fd mmap fd %d size %d", handle->fd, handle->size/*thumbnailHandle->size*/);
                    // Map the gralloc fd into a camera_memory_t for frame callbacks.
                    mPreviewMapped[cnt]= mGetMemory(handle->fd,handle->size,1,mCallbackCookie);

                    if((void *)mPreviewMapped[cnt] == NULL){
                        ALOGE(" Failed to get camera memory for  Preview buffer %d ",cnt);
                    }else{
                        ALOGI(" Mapped Preview buffer %d", cnt);
                    }
                    ALOGI("Got the following from get_mem data: %p, handle :%d, release : %p, size: %d",
                        mPreviewMapped[cnt]->data,
                        mPreviewMapped[cnt]->handle,
                        mPreviewMapped[cnt]->release,
                        mPreviewMapped[cnt]->size);
                    ALOGI(" getbuffersandrestartpreview deQ %d", handle->fd);
                    frames[cnt].fd = handle->fd;
                    frames[cnt].buffer = (unsigned int)mPreviewMapped[cnt]->data;//(unsigned int)mPreviewHeap[cnt]->mHeap->base();
                    if(((void *)frames[cnt].buffer == MAP_FAILED)
                        || (frames[cnt].buffer == 0)) {
                        ALOGE("%s: Couldnt map preview buffers", __FUNCTION__);
                        return UNKNOWN_ERROR;
                    }

                    // YV12 uses separate Cb/Cr planes (3-channel layout);
                    // everything else is semi-planar with a single CbCr plane.
                    if(mPreviewFormat == CAMERA_YUV_420_YV12 && mCurrentTarget != TARGET_MSM7627A) {
                        myv12_params.CbOffset = PAD_TO_WORD(previewWidth * previewHeight);
                        myv12_params.CrOffset = myv12_params.CbOffset + PAD_TO_WORD((previewWidth * previewHeight)/4);
                        ALOGI("CbOffset = 0x%x CrOffset = 0x%x \n",myv12_params.CbOffset, myv12_params.CrOffset);
                        frames[cnt].planar0_off = 0;
                        frames[cnt].planar1_off = myv12_params.CbOffset;
                        frames[cnt].planar2_off = myv12_params.CrOffset;
                        frames[cnt].path = OUTPUT_TYPE_P; // MSM_FRAME_ENC;
                        all_chnls = true;
                    }else{
                        frames[cnt].planar0_off = 0;
                        frames[cnt].planar1_off= CbCrOffset;
                        frames[cnt].planar2_off = 0;
                        frames[cnt].path = OUTPUT_TYPE_P; // MSM_FRAME_ENC;
                    }
                    frame_buffer[cnt].frame = &frames[cnt];
                    frame_buffer[cnt].buffer = bhandle;
                    frame_buffer[cnt].size = handle->size;
                    frame_buffer[cnt].lockState = BUFFER_LOCKED;
                    // Only the first ACTIVE_PREVIEW_BUFFERS are registered active.
                    active = (cnt < ACTIVE_PREVIEW_BUFFERS);

                    ALOGI("Registering buffer %d with fd :%d with kernel",cnt,handle->fd);
                    register_buf(mBufferSize,
                             mBufferSize, CbCrOffset, 0,
                             handle->fd,
                             0,
                             (uint8_t *)frames[cnt].buffer/*(uint8_t *)mThumbnailMapped*/,
                             MSM_PMEM_PREVIEW,
                             active,true,all_chnls);
                    ALOGI("Came back from register call to kernel");
                  } else
                      ALOGE("%s: setPreviewWindow: Could not get buffer handle", __FUNCTION__);
            } else {
                ALOGE("%s: lockBuffer failed for preview buffer. Error = %d",
                         __FUNCTION__, retVal);
                return retVal;
            }
        }


        // Dequeue Thumbnail/Postview  Buffers here , Consider ZSL/Multishot cases
        for (cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {

            retVal = mPreviewWindow->dequeue_buffer(mPreviewWindow,
                         &mThumbnailBuffer[cnt], &(stride));
            // NOTE(review): handle is dereferenced from the buffer before
            // retVal is checked -- on dequeue failure mThumbnailBuffer[cnt]
            // may be invalid; verify ordering.
            private_handle_t* handle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
            ALOGI(" : dequeing thumbnail buffer fd %d", handle->fd);
            if(retVal != NO_ERROR) {
                ALOGE("%s: dequeueBuffer failed for postview buffer. Error = %d ",
                                                            __FUNCTION__, retVal);
                return retVal;
            }
        }

        // Cancel minUndequeuedBufs.
        // The window requires this many buffers to stay with it; unlock and
        // return the tail of the pool immediately.
        for (cnt = kPreviewBufferCount; cnt < mTotalPreviewBufferCount; cnt++) {
            if (GENLOCK_FAILURE == genlock_unlock_buffer((native_handle_t*)(*(frame_buffer[cnt].buffer)))) {
                ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
                return -EINVAL;
            }
            frame_buffer[cnt].lockState = BUFFER_UNLOCKED;
            status_t retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
                                frame_buffer[cnt].buffer);
            ALOGI(" Cancelling preview buffers %d ",frame_buffer[cnt].frame->fd);
        }
    } else {
        ALOGE("%s: Could not get Buffer from Surface", __FUNCTION__);
        return UNKNOWN_ERROR;
    }
    mPreviewBusyQueue.init();
    LINK_camframe_release_all_frames(CAM_PREVIEW_FRAME);
    // Frames beyond the active set are queued to the camera driver's free list.
    for(int i=ACTIVE_PREVIEW_BUFFERS ;i < kPreviewBufferCount; i++)
        LINK_camframe_add_frame(CAM_PREVIEW_FRAME,&frames[i]);

    mBuffersInitialized = true;

    //Starting preview now as the preview buffers are allocated
 //   if(!mPreviewInitialized && !mCameraRunning) {   // TODO just for testing
        ALOGI("setPreviewWindow: Starting preview after buffer allocation");
        startPreviewInternal();
 //   }
    ALOGV(" %s : X ",__FUNCTION__);
    return NO_ERROR;
}
/* Full camera teardown, called when the client releases the hardware.
 * Order matters: stop preview/recording first, join the JPEG encoder,
 * deinit the capture ops, wake and join the snapshot thread, free raw
 * snapshot resources, clear 8660-specific heaps, deinit mm-camera, close
 * the framebuffer fd, then mark the singleton as releasing and wait for
 * the frame thread to exit. Holds mLock throughout. */
void QualcommCameraHardware::release()
{
    ALOGV("release E");
    Mutex::Autolock l(&mLock);
#if 0
    {
        Mutex::Autolock checkLock(&singleton_lock);
        if(singleton_releasing){
            ALOGE("ERROR: multiple release!");
            return;
        }
    }
#endif
    ALOGI("release: mCameraRunning = %d", mCameraRunning);
    if (mCameraRunning) {
        // Unblock a recording client waiting for a frame release before
        // stopping, so stopPreviewInternal() cannot deadlock on it.
        if(mDataCallbackTimestamp && (mMsgEnabled & CAMERA_MSG_VIDEO_FRAME)) {
            mRecordFrameLock.lock();
            mReleasedRecordingFrame = true;
            mRecordWait.signal();
            mRecordFrameLock.unlock();
        }
        stopPreviewInternal();
        ALOGI("release: stopPreviewInternal done.");
    }
    LINK_jpeg_encoder_join();
    // Deinit whichever capture pipeline matches the configured snapshot format.
    mm_camera_ops_type_t current_ops_type = (mSnapshotFormat
            == PICTURE_FORMAT_JPEG) ? CAMERA_OPS_CAPTURE_AND_ENCODE
            : CAMERA_OPS_RAW_CAPTURE;
    mCamOps.mm_camera_deinit(current_ops_type, NULL, NULL);

    //Signal the snapshot thread
    mJpegThreadWaitLock.lock();
    mJpegThreadRunning = false;
    mJpegThreadWait.signal();
    mJpegThreadWaitLock.unlock();

    // Wait for snapshot thread to complete before clearing the
    // resources.
    mSnapshotThreadWaitLock.lock();
    while (mSnapshotThreadRunning) {
        ALOGV("release: waiting for old snapshot thread to complete.");
        mSnapshotThreadWait.wait(mSnapshotThreadWaitLock);
        ALOGV("release: old snapshot thread completed.");
    }
    mSnapshotThreadWaitLock.unlock();

    {
        Mutex::Autolock l (&mRawPictureHeapLock);
        deinitRaw();
    }

    deinitRawSnapshot();
    ALOGI("release: clearing resources done.");
    if(mCurrentTarget == TARGET_MSM8660) {
       ALOGV("release : Clearing the mThumbnailHeap and mDisplayHeap");
       mLastPreviewFrameHeap.clear();
       mLastPreviewFrameHeap = NULL;
       mThumbnailHeap.clear();
       mThumbnailHeap = NULL;
       mPostviewHeap.clear();
       mPostviewHeap = NULL;
       mDisplayHeap.clear();
       mDisplayHeap = NULL;
    }
    LINK_mm_camera_deinit();
    if(fb_fd >= 0) {
        close(fb_fd);
        fb_fd = -1;
    }
    // Record the release start time so a subsequent open can wait for
    // this teardown to finish.
    singleton_lock.lock();
    singleton_releasing = true;
    singleton_releasing_start_time = systemTime();
    singleton_lock.unlock();

    ALOGI("release X: mCameraRunning = %d, mFrameThreadRunning = %d", mCameraRunning, mFrameThreadRunning);
    ALOGI("mVideoThreadRunning = %d, mSnapshotThreadRunning = %d, mJpegThreadRunning = %d", mVideoThreadRunning, mSnapshotThreadRunning, mJpegThreadRunning);
    ALOGI("camframe_timeout_flag = %d, mAutoFocusThreadRunning = %d", camframe_timeout_flag, mAutoFocusThreadRunning);
    mFrameThreadWaitLock.lock();
    while (mFrameThreadRunning) {
        ALOGV("release: waiting for old frame thread to complete.");
        mFrameThreadWait.wait(mFrameThreadWaitLock);
        ALOGV("release: old frame thread completed.");
    }
    mFrameThreadWaitLock.unlock();

}
+
/* Destructor: frees recording-frame bookkeeping arrays that were
 * heap-allocated only on 7x30/QSD8250/8660 targets, and drops the
 * reference to the mm-camera dynamic-library wrapper. The commented-out
 * singleton bookkeeping was replaced by the release()/singleton_lock
 * mechanism. */
QualcommCameraHardware::~QualcommCameraHardware()
{
    ALOGI("~QualcommCameraHardware E");

    //singleton_lock.lock();
    if( mCurrentTarget == TARGET_MSM7630 || mCurrentTarget == TARGET_QSD8250 || mCurrentTarget == TARGET_MSM8660 ) {
        delete [] recordframes;
        recordframes = NULL;
        delete [] record_buffers_tracking_flag;  // NOTE(review): not NULLed, unlike recordframes
    }
    mMMCameraDLRef.clear();
    //singleton.clear();
    //singleton_releasing = false;
    //singleton_releasing_start_time = 0;
    //singleton_wait.signal();
    //singleton_lock.unlock();
    ALOGI("~QualcommCameraHardware X");
}
#if 0
/* DEAD CODE: legacy IMemoryHeap accessors from the pre-gralloc preview
 * path, disabled by the outer #if 0. The bodies are additionally gutted
 * by inner #if 0 blocks, so even if re-enabled these non-void functions
 * would fall off the end without returning. Kept for reference only. */
IMemoryHeap* QualcommCameraHardware::getRawHeap() const
{
#if 0
    ALOGV("getRawHeap");
    return mDisplayHeap != NULL ? mDisplayHeap->mHeap : NULL;
#endif
}

IMemoryHeap* QualcommCameraHardware::getPreviewHeap() const
{
#if 0
    ALOGV("getPreviewHeap");
    return mPreviewHeap[0] != NULL ? mPreviewHeap[0]->mHeap : NULL;
    if(mIs3DModeOn != true) {
        if(( mPreviewFormat == CAMERA_YUV_420_YV12 ) &&
           ( mCurrentTarget == TARGET_MSM7627A || mCurrentTarget == TARGET_MSM7627 ) &&
            previewWidth%32 != 0 )
            return mYV12Heap->mHeap;

        return mPreviewHeap != NULL ? mPreviewHeap->mHeap : NULL;
    } else
        return mRecordHeap != NULL ? mRecordHeap->mHeap : NULL;

#endif
}
#endif
#if 0
/* DEAD CODE: the original startInitialPreview(), disabled by #if 0 and
 * superseded by the trivial DUMMY_CAMERA_STARTED version further below.
 * It allocated pmem-backed preview/record heaps at a fixed initial
 * resolution and kicked off video streaming before a preview window was
 * available. Kept for reference only. */
status_t QualcommCameraHardware::startInitialPreview() {
   ALOGV(" %s : E", __FUNCTION__);
   const char * pmem_region = "/dev/pmem_smipool";
   int initialPreviewWidth = INITIAL_PREVIEW_WIDTH;
   int initialPreviewHeight = INITIAL_PREVIEW_HEIGHT;
   int previewFrameSize = initialPreviewWidth * initialPreviewHeight * 3/2;
   int CbCrOffset = PAD_TO_WORD(initialPreviewWidth * initialPreviewHeight);
   mFrameThreadWaitLock.lock();
   while (mFrameThreadRunning) {
       ALOGV("%s: waiting for old frame thread to complete.", __FUNCTION__);
       mFrameThreadWait.wait(mFrameThreadWaitLock);
       ALOGV("%s: old frame thread completed.",__FUNCTION__);
   }
   mFrameThreadWaitLock.unlock();

   mInitialPreviewHeap = new PmemPool(pmem_region,
                               MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
                               MSM_PMEM_PREVIEW,
                               previewFrameSize,
                               kPreviewBufferCount,
                               previewFrameSize,
                               CbCrOffset,
                               0,
                               "initial preview");

    mDimension.display_width  = initialPreviewWidth;
    mDimension.display_height = initialPreviewHeight;
    mDimension.video_width  = initialPreviewWidth;
    mDimension.video_height = initialPreviewHeight;
    mDimension.display_luma_width = initialPreviewWidth;
    mDimension.display_luma_height = initialPreviewHeight;
    mDimension.display_chroma_width = initialPreviewWidth;
    mDimension.display_chroma_height = initialPreviewHeight;
    mDimension.orig_video_width = initialPreviewWidth;
    mDimension.orig_video_height = initialPreviewHeight;
    ALOGV("mDimension.prev_format = %d", mDimension.prev_format);
    ALOGV("mDimension.display_luma_width = %d", mDimension.display_luma_width);
    ALOGV("mDimension.display_luma_height = %d", mDimension.display_luma_height);
    ALOGV("mDimension.display_chroma_width = %d", mDimension.display_chroma_width);
    ALOGV("mDimension.display_chroma_height = %d", mDimension.display_chroma_height);

    native_set_parms(CAMERA_PARM_DIMENSION,
              sizeof(cam_ctrl_dimension_t), &mDimension);
    ALOGV(" %s : mDimension.video_width = %d mDimension.video_height = %d", __FUNCTION__,
             mDimension.video_width, mDimension.video_height);
    mRecordFrameSize = previewFrameSize;
    ALOGV("mRecordFrameSize = %d", mRecordFrameSize);

    mRecordHeap = new PmemPool(pmem_region,
                               MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
                               MSM_PMEM_VIDEO,
                               previewFrameSize,
                               kRecordBufferCount,
                               previewFrameSize,
                               CbCrOffset,
                               0,
                               "initial record");

    if (!mRecordHeap->initialized()) {
        mRecordHeap.clear();
        ALOGE("%s X: could not initialize record heap.", __FUNCTION__);
        return false;
    }
    {
        Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
        mCameraRunning = native_start_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
    }

    ALOGV(" %s : X", __FUNCTION__);
    return NO_ERROR;
}
#endif
+status_t QualcommCameraHardware::startPreviewInternal()
+{
+ ALOGV("in startPreviewInternal : E");
+ if (!mBuffersInitialized) {
+ ALOGE("startPreviewInternal: Buffers not allocated. Cannot start preview");
+ return NO_ERROR;
+ }
+ mPreviewStopping = false;
+#if 0
+ if(mZslEnable && !mZslPanorama){
+ ALOGE("start zsl Preview called");
+ mCamOps.mm_camera_start(CAMERA_OPS_ZSL_STREAMING_CB,NULL, NULL);
+ if (mCurrentTarget == TARGET_MSM8660) {
+ if(mLastPreviewFrameHeap != NULL)
+ mLastPreviewFrameHeap.clear();
+ }
+ }
+#endif
+ if(mCameraRunning) {
+ ALOGV("startPreview X: preview already running.");
+ return NO_ERROR;
+ }
+ if(mZslEnable){
+ //call init
+ ALOGI("ZSL Enable called");
+ uint8_t is_zsl = 1;
+ mm_camera_status_t status;
+ if(MM_CAMERA_SUCCESS != mCfgControl.mm_camera_set_parm(CAMERA_PARM_ZSL_ENABLE,
+ (void *)&is_zsl)){
+ ALOGE("ZSL Enable failed");
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ if (!mPreviewInitialized) {
+ mLastQueuedFrame = NULL;
+ mPreviewInitialized = initPreview();
+ if (!mPreviewInitialized) {
+ ALOGE("startPreview X initPreview failed. Not starting preview.");
+ mPreviewBusyQueue.deinit();
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ /* For 3D mode, start the video output, as this need to be
+ * used for display also.
+ */
+ if(mIs3DModeOn) {
+ startRecordingInternal();
+ if(!mVideoThreadRunning) {
+ ALOGE("startPreview X startRecording failed. Not starting preview.");
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ {
+ Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
+ if(( mCurrentTarget != TARGET_MSM7630 ) &&
+ (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660))
+ mCameraRunning = native_start_ops(CAMERA_OPS_STREAMING_PREVIEW, NULL);
+ else {
+ if(!mZslEnable){
+ ALOGI("Calling CAMERA_OPS_STREAMING_VIDEO");
+ mCameraRunning = native_start_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
+ ALOGI(": Calling CAMERA_OPS_STREAMING_VIDEO %d", mCameraRunning);
+ }else {
+ initZslParameter();
+ mCameraRunning = false;
+ if (MM_CAMERA_SUCCESS == mCamOps.mm_camera_init(CAMERA_OPS_STREAMING_ZSL,
+ (void *)&mZslParms, NULL)) {
+ //register buffers for ZSL
+ bool status = initZslBuffers(true);
+ if(status != true) {
+ ALOGE("Failed to allocate ZSL buffers");
+ return false;
+ }
+ if(MM_CAMERA_SUCCESS == mCamOps.mm_camera_start(CAMERA_OPS_STREAMING_ZSL,NULL, NULL)){
+ mCameraRunning = true;
+ }
+ }
+ if(mCameraRunning == false)
+ ALOGE("Starting ZSL CAMERA_OPS_STREAMING_ZSL failed!!!");
+ }
+ }
+ }
+
+ if(!mCameraRunning) {
+ deinitPreview();
+ if(mZslEnable){
+ //deinit
+ ALOGI("ZSL DISABLE called");
+ uint8_t is_zsl = 0;
+ mm_camera_status_t status;
+ if( MM_CAMERA_SUCCESS != mCfgControl.mm_camera_set_parm(CAMERA_PARM_ZSL_ENABLE,
+ (void *)&is_zsl)){
+ ALOGE("ZSL_Disable failed!!");
+ return UNKNOWN_ERROR;
+ }
+ }
+ /* Flush the Busy Q */
+ cam_frame_flush_video();
+ /* Need to flush the free Qs as these are initalized in initPreview.*/
+ LINK_camframe_release_all_frames(CAM_VIDEO_FRAME);
+ LINK_camframe_release_all_frames(CAM_PREVIEW_FRAME);
+ mPreviewInitialized = false;
+ mOverlayLock.lock();
+ //mOverlay = NULL;
+ mOverlayLock.unlock();
+ ALOGE("startPreview X: native_start_ops: CAMERA_OPS_STREAMING_PREVIEW ioctl failed!");
+ return UNKNOWN_ERROR;
+ }
+
+ //Reset the Gps Information
+ exif_table_numEntries = 0;
+ previewWidthToNativeZoom = previewWidth;
+ previewHeightToNativeZoom = previewHeight;
+
+ ALOGV("startPreviewInternal X");
+ return NO_ERROR;
+}
/* "Initial preview" stub: called when startPreview() arrives before a
 * preview window is set. Marks the camera as running with a dummy state
 * so CameraService sees preview enabled; the real stream starts later in
 * setPreviewWindow() -> getBuffersAndStartPreview(). */
status_t QualcommCameraHardware::startInitialPreview() {
   mCameraRunning = DUMMY_CAMERA_STARTED;
   return NO_ERROR;
}
+status_t QualcommCameraHardware::startPreview()
+{
+ status_t result;
+ ALOGV("startPreview E");
+ Mutex::Autolock l(&mLock);
+ if( mPreviewWindow == NULL) {
+ /* startPreview has been called before setting the preview
+ * window. Start the camera with initial buffers because the
+ * CameraService expects the preview to be enabled while
+ * setting a valid preview window */
+ ALOGV(" %s : Starting preview with initial buffers ", __FUNCTION__);
+ result = startInitialPreview();
+ } else {
+ /* startPreview has been issued after a valid preview window
+ * is set. Get the preview buffers from gralloc and start
+ * preview normally */
+ ALOGV(" %s : Starting normal preview ", __FUNCTION__);
+ result = getBuffersAndStartPreview();
+ }
+ ALOGV("startPreview X");
+ return result;
+}
+
/* Ends the dummy "initial preview" started by startInitialPreview():
 * simply clears mCameraRunning so the real preview can start. The
 * disabled block below is the old implementation that actually stopped
 * the video stream and freed the initial pmem heaps. */
void QualcommCameraHardware::stopInitialPreview() {
   mCameraRunning = 0;//!native_stop_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
#if 0
   ALOGV(" %s : E ", __FUNCTION__);
   if (mCameraRunning) {
       ALOGV(" %s : Camera was running. Stopping ", __FUNCTION__);
       {
           Mutex::Autolock l(&mCamframeTimeoutLock);
           {
               Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
               if(!camframe_timeout_flag) {
                   mCameraRunning = !native_stop_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
               }
           }
       }
       mInitialPreviewHeap.clear();
       mRecordHeap.clear();
   }
   ALOGV(" %s : X ", __FUNCTION__);
#endif
}
+
/* Stops the running preview stream and tears down per-preview state.
 * Steps: in 3D mode signal the video thread to exit; cancel any pending
 * autofocus; wake the smooth-zoom thread so it exits; then, under the
 * camframe-timeout and camera-running locks, issue the target-specific
 * stop op (preview stream, video stream, or ZSL stop+deinit). Finally,
 * if the camera actually stopped, deinitPreview() and flush the video
 * queues on 7x30/QSD8250/8660 targets. */
void QualcommCameraHardware::stopPreviewInternal()
{
    ALOGV("stopPreviewInternal E: %d", mCameraRunning);
    mPreviewStopping = true;
    if (mCameraRunning && mPreviewWindow!=NULL) {
        /* For 3D mode, we need to exit the video thread.*/
        if(mIs3DModeOn) {
            mRecordingState = 0;
            mVideoThreadWaitLock.lock();
            ALOGI("%s: 3D mode, exit video thread", __FUNCTION__);
            mVideoThreadExit = 1;
            mVideoThreadWaitLock.unlock();

            // Wake the video thread so it observes mVideoThreadExit.
            pthread_mutex_lock(&(g_busy_frame_queue.mut));
            pthread_cond_signal(&(g_busy_frame_queue.wait));
            pthread_mutex_unlock(&(g_busy_frame_queue.mut));
        }

        // Cancel auto focus.
        {
            if (mNotifyCallback && (mMsgEnabled & CAMERA_MSG_FOCUS)) {
                cancelAutoFocusInternal();
            }
        }

        // make mSmoothzoomThreadExit true
        mSmoothzoomThreadLock.lock();
        mSmoothzoomThreadExit = true;
        mSmoothzoomThreadLock.unlock();
        // signal smooth zoom thread, so that it can exit gracefully
        mSmoothzoomThreadWaitLock.lock();
        if(mSmoothzoomThreadRunning)
            mSmoothzoomThreadWait.signal();

        mSmoothzoomThreadWaitLock.unlock();

        Mutex::Autolock l(&mCamframeTimeoutLock);
        {
            Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
            if(!camframe_timeout_flag) {
                // Older targets stream preview directly; 7x30/QSD8250/8660
                // stream video (or ZSL) even for preview.
                if (( mCurrentTarget != TARGET_MSM7630 ) &&
                        (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660))
                    mCameraRunning = !native_stop_ops(CAMERA_OPS_STREAMING_PREVIEW, NULL);
                else{
                    if(!mZslEnable){
                        ALOGI("%s ops_streaming mCameraRunning b= %d",__FUNCTION__, mCameraRunning);
                        mCameraRunning = !native_stop_ops(CAMERA_OPS_STREAMING_VIDEO, NULL);
                        ALOGI("%s ops_streaming mCameraRunning = %d",__FUNCTION__, mCameraRunning);
                    }else {
                        // mCameraRunning stays true unless both ZSL stop and
                        // deinit succeed.
                        mCameraRunning = true;
                        if(MM_CAMERA_SUCCESS == mCamOps.mm_camera_stop(CAMERA_OPS_STREAMING_ZSL,NULL, NULL)){
                            deinitZslBuffers();
                            if (MM_CAMERA_SUCCESS == mCamOps.mm_camera_deinit(CAMERA_OPS_STREAMING_ZSL,
                                    (void *)&mZslParms, NULL)) {
                                mCameraRunning = false;
                            }
                        }
                        if(mCameraRunning ==true)
                            ALOGE("Starting  ZSL CAMERA_OPS_STREAMING_ZSL failed!!!");
                    }
                }
            } else {
                /* This means that the camframetimeout was issued.
                 * But we did not issue native_stop_preview(), so we
                 * need to update mCameraRunning to indicate that
                 * Camera is no longer running. */
                ALOGE("%s, : MAKE MCAMER_RUNNING FALSE!!!",__FUNCTION__);
                mCameraRunning = 0;
            }
        }
    }
    /* in 3D mode, wait for the video thread before clearing resources.*/
    if(mIs3DModeOn) {
        mVideoThreadWaitLock.lock();
        while (mVideoThreadRunning) {
            ALOGI("%s: waiting for video thread to complete.", __FUNCTION__);
            mVideoThreadWait.wait(mVideoThreadWaitLock);
            ALOGI("%s : video thread completed.", __FUNCTION__);
        }
        mVideoThreadWaitLock.unlock();
    }
    ALOGI("%s, J_mCameraRunning = %d", __FUNCTION__, mCameraRunning);
    if (!mCameraRunning) {
        ALOGI("%s, before calling deinitpre mPreviewInitialized = %d", __FUNCTION__, mPreviewInitialized);
        if(mPreviewInitialized) {
            ALOGI("before calling deinitpreview");
            deinitPreview();
            if( ( mCurrentTarget == TARGET_MSM7630 ) ||
                (mCurrentTarget == TARGET_QSD8250) ||
                (mCurrentTarget == TARGET_MSM8660)) {
                mVideoThreadWaitLock.lock();
                ALOGV("in stopPreviewInternal: making mVideoThreadExit 1");
                mVideoThreadExit = 1;
                mVideoThreadWaitLock.unlock();
                //720p : signal the video thread , and check in video thread
                //if stop is called, if so exit video thread.
                pthread_mutex_lock(&(g_busy_frame_queue.mut));
                pthread_cond_signal(&(g_busy_frame_queue.wait));
                pthread_mutex_unlock(&(g_busy_frame_queue.mut));

                ALOGI(" flush video and release all frames");
                /* Flush the Busy Q */
                cam_frame_flush_video();
                /* Flush the Free Q */
                LINK_camframe_release_all_frames(CAM_VIDEO_FRAME);
            }
            mPreviewInitialized = false;
        }
    }
    else ALOGI("stopPreviewInternal: Preview is stopped already");

    ALOGV("stopPreviewInternal X: %d", mCameraRunning);
}
+
+void QualcommCameraHardware::stopPreview()
+{
+ ALOGV("stopPreview: E");
+ Mutex::Autolock l(&mLock);
+ {
+ if (mDataCallbackTimestamp && (mMsgEnabled & CAMERA_MSG_VIDEO_FRAME))
+ return;
+ }
+ if( mSnapshotThreadRunning ) {
+ ALOGV("In stopPreview during snapshot");
+ return;
+ }
+ if( mPreviewWindow != NULL ) {
+ private_handle_t *handle;
+ for (int cnt = 0; cnt < (mZslEnable? (MAX_SNAPSHOT_BUFFERS-2) : numCapture); cnt++) {
+ if(mPreviewWindow != NULL && mThumbnailBuffer[cnt] != NULL) {
+ handle = (private_handle_t *)(*mThumbnailBuffer[cnt]);
+ ALOGI("%s: Cancelling postview buffer %d ", __FUNCTION__, handle->fd);
+ ALOGI("stoppreview : display lock");
+ mDisplayLock.lock();
+ if (BUFFER_LOCKED == mThumbnailLockState[cnt]) {
+ if (GENLOCK_FAILURE == genlock_unlock_buffer(handle)) {
+ ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+ mDisplayLock.unlock();
+ continue;
+ } else {
+ mThumbnailLockState[cnt] = BUFFER_UNLOCKED;
+ }
+ }
+ status_t retVal = mPreviewWindow->cancel_buffer(mPreviewWindow,
+ mThumbnailBuffer[cnt]);
+ ALOGI("stopPreview : after cancelling thumbnail buffer");
+ if(retVal != NO_ERROR)
+ ALOGE("%s: cancelBuffer failed for postview buffer %d",
+ __FUNCTION__, handle->fd);
+ // unregister , unmap and release as well
+ int mBufferSize = previewWidth * previewHeight * 3/2;
+ int mCbCrOffset = PAD_TO_WORD(previewWidth * previewHeight);
+ if(mThumbnailMapped[cnt] && (mSnapshotFormat == PICTURE_FORMAT_JPEG)
+ || mZslEnable) {
+ ALOGI("%s: Unregistering Thumbnail Buffer %d ", __FUNCTION__, handle->fd);
+ register_buf(mBufferSize,
+ mBufferSize, mCbCrOffset, 0,
+ handle->fd,
+ 0,
+ (uint8_t *)mThumbnailMapped[cnt],
+ MSM_PMEM_THUMBNAIL,
+ false, false);
+ if (munmap((void *)(mThumbnailMapped[cnt]),handle->size ) == -1) {
+ ALOGE("StopPreview : Error un-mmapping the thumbnail buffer %d", index);
+ }
+ mThumbnailMapped[cnt] = NULL;
+ }
+ mThumbnailBuffer[cnt] = NULL;
+ ALOGI("stoppreview : display unlock");
+ mDisplayLock.unlock();
+ }
+ }
+ }
+ stopPreviewInternal();
+ ALOGV("stopPreview: X");
+}
+
/* Body of the detached autofocus thread (see auto_focus_thread()).
 * Skips AF entirely for infinity/continuous-video focus modes, issues a
 * blocking CAMERA_OPS_FOCUS to the driver under mAfLock (tryLock: if the
 * lock is busy, an AF-cancel is in progress and the request is dropped),
 * refreshes the focus distances, then clears the thread-running flag and
 * fires the CAMERA_MSG_FOCUS notification with the AF result. */
void QualcommCameraHardware::runAutoFocus()
{
    bool status = true;
    void *libhandle = NULL;  // NOTE(review): never assigned or used in this function
    isp3a_af_mode_t afMode = AF_MODE_AUTO;

    // Held until 'done:' -- marks this thread as the active AF worker.
    mAutoFocusThreadLock.lock();
    // Skip autofocus if focus mode is infinity.

    const char * focusMode = mParameters.get(QCameraParameters::KEY_FOCUS_MODE);
    // NULL check must come first: strcmp on a missing parameter would crash.
    if ((mParameters.get(QCameraParameters::KEY_FOCUS_MODE) == 0)
           || (strcmp(focusMode, QCameraParameters::FOCUS_MODE_INFINITY) == 0)
           || (strcmp(focusMode, QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) == 0)) {
        goto done;
    }

    if(!libmmcamera){
        ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
        mAutoFocusThreadRunning = false;
        mAutoFocusThreadLock.unlock();
        return;
    }

    afMode = (isp3a_af_mode_t)attr_lookup(focus_modes,
                                sizeof(focus_modes) / sizeof(str_map),
                                mParameters.get(QCameraParameters::KEY_FOCUS_MODE));

    /* This will block until either AF completes or is cancelled. */
    ALOGV("af start (mode %d)", afMode);
    status_t err;
    err = mAfLock.tryLock();
    if(err == NO_ERROR) {
        {
            Mutex::Autolock cameraRunningLock(&mCameraRunningLock);
            if(mCameraRunning){
                ALOGV("Start AF");
                status =  native_start_ops(CAMERA_OPS_FOCUS ,(void *)&afMode);
            }else{
                ALOGV("As Camera preview is not running, AF not issued");
                status = false;
            }
        }
        mAfLock.unlock();
    }
    else{
        //AF Cancel would have acquired the lock,
        //so, no need to perform any AF
        ALOGV("As Cancel auto focus is in progress, auto focus request "
                "is ignored");
        status = FALSE;
    }
    {
        Mutex::Autolock pl(&mParametersLock);
        if(mHasAutoFocusSupport && (updateFocusDistances(focusMode) != NO_ERROR)) {
            ALOGE("%s: updateFocusDistances failed for %s", __FUNCTION__, focusMode);
        }
    }

    ALOGV("af done: %d", (int)status);

done:
    mAutoFocusThreadRunning = false;
    mAutoFocusThreadLock.unlock();

    // Snapshot the callback under mCallbackLock, then invoke it unlocked
    // so the app callback cannot deadlock against us.
    mCallbackLock.lock();
    bool autoFocusEnabled = mNotifyCallback && (mMsgEnabled & CAMERA_MSG_FOCUS);
    camera_notify_callback cb = mNotifyCallback;
    void *data = mCallbackCookie;
    mCallbackLock.unlock();
    if (autoFocusEnabled)
        cb(CAMERA_MSG_FOCUS, status, 0, data);

}
+
/* Cancels an in-flight autofocus operation.
 * Polls mAfLock with tryLock: acquiring it means AF has not started (or
 * already finished), so there is nothing to cancel -- but if the AF
 * thread is still marked running we sleep briefly and retry, giving it a
 * chance to take the lock. If tryLock eventually fails while the AF
 * thread runs, AF is genuinely in progress and we issue the driver stop,
 * then wait for the AF thread by bouncing through its lock.
 * Returns NO_ERROR, or UNKNOWN_ERROR if the driver stop op failed. */
status_t QualcommCameraHardware::cancelAutoFocusInternal()
{
    ALOGV("cancelAutoFocusInternal E");
    bool afRunning = true;

    if(!mHasAutoFocusSupport){
        ALOGV("cancelAutoFocusInternal X");
        return NO_ERROR;
    }

    status_t rc = NO_ERROR;
    status_t err;

    do {
        err = mAfLock.tryLock();
        if(err == NO_ERROR) {
            //Got Lock, means either AF hasn't started or
            // AF is done. So no need to cancel it, just change the state
            ALOGV("Auto Focus is not in progress, Cancel Auto Focus is ignored");
            mAfLock.unlock();

            mAutoFocusThreadLock.lock();
            afRunning = mAutoFocusThreadRunning;
            mAutoFocusThreadLock.unlock();
            if(afRunning) {
                // AF thread exists but hasn't taken mAfLock yet; back off
                // and let it proceed before re-checking.
                usleep( 5000 );
            }
        }
    } while ( err == NO_ERROR && afRunning );
    if(afRunning) {
        //AF is in Progess, So cancel it
        ALOGV("Lock busy...cancel AF");
        rc = native_stop_ops(CAMERA_OPS_FOCUS, NULL) ?
                NO_ERROR : UNKNOWN_ERROR;

        /*now just wait for auto focus thread to be finished*/
        // lock+unlock acts as a barrier: returns once runAutoFocus() has
        // released mAutoFocusThreadLock at its 'done:' label.
        mAutoFocusThreadLock.lock();
        mAutoFocusThreadLock.unlock();
    }
    ALOGV("cancelAutoFocusInternal X: %d", rc);
    return rc;
}
+
+void *auto_focus_thread(void *user)
+{
+ ALOGV("auto_focus_thread E");
+ CAMERA_HAL_UNUSED(user);
+ QualcommCameraHardware *obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->runAutoFocus();
+ }
+ else ALOGW("not starting autofocus: the object went away!");
+ ALOGV("auto_focus_thread X");
+ return NULL;
+}
+
+status_t QualcommCameraHardware::autoFocus()
+{
+ ALOGV("autoFocus E");
+ Mutex::Autolock l(&mLock);
+
+ if(!mHasAutoFocusSupport){
+ /*
+ * If autofocus is not supported HAL defaults
+ * focus mode to infinity and supported mode to
+ * infinity also. In this mode and fixed mode app
+ * should not call auto focus.
+ */
+ ALOGI("Auto Focus not supported");
+ ALOGV("autoFocus X");
+ return INVALID_OPERATION;
+ }
+ {
+ mAutoFocusThreadLock.lock();
+ if (!mAutoFocusThreadRunning) {
+
+ // Create a detached thread here so that we don't have to wait
+ // for it when we cancel AF.
+ pthread_t thr;
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+ mAutoFocusThreadRunning =
+ !pthread_create(&thr, &attr,
+ auto_focus_thread, NULL);
+ if (!mAutoFocusThreadRunning) {
+ ALOGE("failed to start autofocus thread");
+ mAutoFocusThreadLock.unlock();
+ return UNKNOWN_ERROR;
+ }
+ }
+ mAutoFocusThreadLock.unlock();
+ }
+
+ ALOGV("autoFocus X");
+ return NO_ERROR;
+}
+
+status_t QualcommCameraHardware::cancelAutoFocus()
+{
+ ALOGV("cancelAutoFocus E");
+ Mutex::Autolock l(&mLock);
+
+ int rc = NO_ERROR;
+ if (mCameraRunning && mNotifyCallback && (mMsgEnabled & CAMERA_MSG_FOCUS)) {
+ rc = cancelAutoFocusInternal();
+ }
+
+ ALOGV("cancelAutoFocus X");
+ return rc;
+}
+
+// Body of the detached snapshot worker (see snapshot_thread). Honors a
+// pending cancel, starts the capture/encode operation for the configured
+// picture format, waits for the JPEG callback, then tears down and signals
+// completion to any thread blocked in takePicture/cancelPicture.
+void QualcommCameraHardware::runSnapshotThread(void *data)
+{
+ bool ret = true; // NOTE(review): never read after init
+ CAMERA_HAL_UNUSED(data);
+ ALOGI("runSnapshotThread E");
+
+ // Logged as fatal but execution deliberately continues; downstream calls
+ // will fail if the library really is absent.
+ if(!libmmcamera){
+ ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+ }
+ mSnapshotCancelLock.lock();
+ // cancelPicture may have raced ahead of us: abort before starting capture,
+ // waking both the in-snapshot-mode waiter and the thread-done waiter.
+ if(mSnapshotCancel == true) {
+ mSnapshotCancel = false;
+ mSnapshotCancelLock.unlock();
+ ALOGI("%s: cancelpicture has been called..so abort taking snapshot", __FUNCTION__);
+ deinitRaw();
+ mInSnapshotModeWaitLock.lock();
+ mInSnapshotMode = false;
+ mInSnapshotModeWait.signal();
+ mInSnapshotModeWaitLock.unlock();
+ mSnapshotThreadWaitLock.lock();
+ mSnapshotThreadRunning = false;
+ mSnapshotThreadWait.signal();
+ mSnapshotThreadWaitLock.unlock();
+ return;
+ }
+ mSnapshotCancelLock.unlock();
+
+ // Mark the JPEG encode as in-flight; the data callback clears this flag.
+ mJpegThreadWaitLock.lock();
+ mJpegThreadRunning = true;
+ mJpegThreadWait.signal();
+ mJpegThreadWaitLock.unlock();
+ mm_camera_ops_type_t current_ops_type = (mSnapshotFormat == PICTURE_FORMAT_JPEG) ?
+ CAMERA_OPS_CAPTURE_AND_ENCODE :
+ CAMERA_OPS_RAW_CAPTURE;
+ if(strTexturesOn == true) {
+ // Texture streaming: capture only, application performs the encode.
+ current_ops_type = CAMERA_OPS_CAPTURE;
+ mCamOps.mm_camera_start(current_ops_type,(void *)&mImageCaptureParms,
+ NULL);
+ } else if(mSnapshotFormat == PICTURE_FORMAT_JPEG){
+ if(!mZslEnable || mZslFlashEnable){
+ mCamOps.mm_camera_start(current_ops_type,(void *)&mImageCaptureParms,
+ (void *)&mImageEncodeParms);
+ }else{
+ // ZSL path: shutter fires immediately, capture params come from
+ // the ZSL queue configuration.
+ notifyShutter(TRUE);
+ initZslParameter();
+ ALOGI("snapshot mZslCapture.thumbnail %d %d %d",mZslCaptureParms.thumbnail_width,
+ mZslCaptureParms.thumbnail_height,mZslCaptureParms.num_captures);
+ mCamOps.mm_camera_start(current_ops_type,(void *)&mZslCaptureParms,
+ (void *)&mImageEncodeParms);
+ }
+ // Block until the JPEG data callback clears mJpegThreadRunning.
+ mJpegThreadWaitLock.lock();
+ while (mJpegThreadRunning) {
+ ALOGV("%s: waiting for jpeg callback.", __FUNCTION__);
+ mJpegThreadWait.wait(mJpegThreadWaitLock);
+ ALOGV("%s: jpeg callback received.", __FUNCTION__);
+ }
+ mJpegThreadWaitLock.unlock();
+
+ //cleanup
+ if(!mZslEnable || mZslFlashEnable)
+ deinitRaw();
+ }else if(mSnapshotFormat == PICTURE_FORMAT_RAW){
+ notifyShutter(TRUE);
+ mCamOps.mm_camera_start(current_ops_type,(void *)&mRawCaptureParms,
+ NULL);
+ // Waiting for callback to come
+ ALOGV("runSnapshotThread : waiting for callback to come");
+ mJpegThreadWaitLock.lock();
+ while (mJpegThreadRunning) {
+ ALOGV("%s: waiting for jpeg callback.", __FUNCTION__);
+ mJpegThreadWait.wait(mJpegThreadWaitLock);
+ ALOGV("%s: jpeg callback received.", __FUNCTION__);
+ }
+ mJpegThreadWaitLock.unlock();
+ ALOGV("runSnapshotThread : calling deinitRawSnapshot");
+ deinitRawSnapshot();
+
+ }
+
+ if(!mZslEnable || mZslFlashEnable)
+ mCamOps.mm_camera_deinit(current_ops_type, NULL, NULL);
+ mZslFlashEnable = false;
+ // Announce completion to takePicture()/cancelPicture() waiters.
+ mSnapshotThreadWaitLock.lock();
+ mSnapshotThreadRunning = false;
+ mSnapshotThreadWait.signal();
+ mSnapshotThreadWaitLock.unlock();
+ ALOGV("runSnapshotThread X");
+}
+
+// Entry point for the detached snapshot worker; forwards to the HAL object.
+void *snapshot_thread(void *user)
+{
+ ALOGD("snapshot_thread E");
+ CAMERA_HAL_UNUSED(user);
+ QualcommCameraHardware *hw = QualcommCameraHardware::getInstance();
+ if (hw == 0) {
+ ALOGW("not starting snapshot thread: the object went away!");
+ } else {
+ hw->runSnapshotThread(user);
+ }
+ ALOGD("snapshot_thread X");
+ return NULL;
+}
+
+// Start a still capture. During video recording this degrades to a live
+// snapshot. Otherwise: waits out any previous snapshot, decides between
+// ZSL/normal/RAW paths, prepares the capture pipeline, and finally launches
+// the detached snapshot worker thread.
+// NOTE(review): mSnapshotThreadWaitLock is taken below and intentionally held
+// across the whole setup until the worker is spawned; every early-error path
+// must (and does) unlock it.
+status_t QualcommCameraHardware::takePicture()
+{
+ ALOGE("takePicture(%d)", mMsgEnabled);
+ Mutex::Autolock l(&mLock);
+ if(mRecordingState ) {
+ return takeLiveSnapshotInternal( );
+ }
+
+ if(strTexturesOn == true){
+ // Texture streaming: wait until the app has consumed/encoded the
+ // previously delivered frame before starting another capture.
+ mEncodePendingWaitLock.lock();
+ while(mEncodePending) {
+ ALOGI("takePicture: Frame given to application, waiting for encode call");
+ mEncodePendingWait.wait(mEncodePendingWaitLock);
+ ALOGI("takePicture: Encode of the application data is done");
+ }
+ mEncodePendingWaitLock.unlock();
+ }
+
+ // Wait for old snapshot thread to complete.
+ mSnapshotThreadWaitLock.lock();
+ while (mSnapshotThreadRunning) {
+ ALOGV("takePicture: waiting for old snapshot thread to complete.");
+ mSnapshotThreadWait.wait(mSnapshotThreadWaitLock);
+ ALOGV("takePicture: old snapshot thread completed.");
+ }
+ // if flash is enabled then run snapshot as normal mode and not zsl mode.
+ // App should expect only 1 callback as multi snapshot in normal mode is not supported
+ mZslFlashEnable = false;
+ if(mZslEnable){
+ int is_flash_needed = 0;
+ mm_camera_status_t status; // NOTE(review): result is never checked
+ status = mCfgControl.mm_camera_get_parm(CAMERA_PARM_QUERY_FALSH4SNAP,
+ (void *)&is_flash_needed);
+ if(is_flash_needed) {
+ mZslFlashEnable = true;
+ }
+ }
+ //Adding ExifTag for Flash
+ const char *flash_str = mParameters.get(QCameraParameters::KEY_FLASH_MODE);
+ if(flash_str){
+ int is_flash_fired = 0;
+ if(mCfgControl.mm_camera_get_parm(CAMERA_PARM_QUERY_FALSH4SNAP,
+ (void *)&is_flash_fired) != MM_CAMERA_SUCCESS){
+ flashMode = FLASH_SNAP ; //for No Flash support,bit 5 will be 1
+ } else {
+ if(!strcmp(flash_str,"on"))
+ flashMode = 1;
+
+ if(!strcmp(flash_str,"off"))
+ flashMode = 0;
+
+ if(!strcmp(flash_str,"auto")){
+ //for AUTO bits 3 and 4 will be 1
+ //for flash fired bit 0 will be 1, else 0
+ flashMode = FLASH_AUTO;
+ if(is_flash_fired)
+ flashMode = (is_flash_fired>>1) | flashMode ;
+ }
+ }
+ addExifTag(EXIFTAGID_FLASH,EXIF_SHORT,1,1,(void *)&flashMode);
+ }
+
+ if(mParameters.getPictureFormat() != 0 &&
+ !strcmp(mParameters.getPictureFormat(),
+ QCameraParameters::PIXEL_FORMAT_RAW)){
+ mSnapshotFormat = PICTURE_FORMAT_RAW;
+ {
+ // HACK: Raw ZSL capture is not supported yet
+ mZslFlashEnable = true;
+ }
+ }
+ else
+ mSnapshotFormat = PICTURE_FORMAT_JPEG;
+
+ if(!mZslEnable || mZslFlashEnable){
+ if((mSnapshotFormat == PICTURE_FORMAT_JPEG)){
+ // Normal-mode JPEG needs an explicit prepare (AEC/flash settle).
+ if(!native_start_ops(CAMERA_OPS_PREPARE_SNAPSHOT, NULL)) {
+ mSnapshotThreadWaitLock.unlock();
+ ALOGE("PREPARE SNAPSHOT: CAMERA_OPS_PREPARE_SNAPSHOT ioctl Failed");
+ return UNKNOWN_ERROR;
+ }
+ }
+ }
+ else {
+ // ZSL-without-flash keeps streaming; only rotation is pushed down.
+ int rotation = mParameters.getInt("rotation");
+ native_set_parms(CAMERA_PARM_JPEG_ROTATION, sizeof(int), &rotation);
+ }
+#if 0 // TODO for ICS
+ if(mCurrentTarget == TARGET_MSM8660) {
+ /* Store the last frame queued for preview. This
+ * shall be used as postview */
+ if (!(storePreviewFrameForPostview()))
+ return UNKNOWN_ERROR;
+ }
+#endif
+ if(!mZslEnable || mZslFlashEnable)
+ stopPreviewInternal();
+#if 0
+ else if(mZslEnable && !mZslPanorama) {
+ /* Dont stop preview if ZSL Panorama is enabled for
+ * Continuous viewfinder support*/
+ ALOGE("Calling stop preview");
+ mCamOps.mm_camera_stop(CAMERA_OPS_ZSL_STREAMING_CB,NULL, NULL);
+ }
+#endif
+
+
+ // NOTE(review): no matching lock() for mFrameThreadWaitLock is visible in
+ // this function -- presumably taken inside stopPreviewInternal(); confirm.
+ mFrameThreadWaitLock.unlock();
+
+ mm_camera_ops_type_t current_ops_type = (mSnapshotFormat == PICTURE_FORMAT_JPEG) ?
+ CAMERA_OPS_CAPTURE_AND_ENCODE :
+ CAMERA_OPS_RAW_CAPTURE;
+ if(strTexturesOn == true)
+ current_ops_type = CAMERA_OPS_CAPTURE;
+
+ if( !mZslEnable || mZslFlashEnable)
+ mCamOps.mm_camera_init(current_ops_type, NULL, NULL);
+
+ if(mSnapshotFormat == PICTURE_FORMAT_JPEG){
+ if(!mZslEnable || mZslFlashEnable) {
+ if (!initRaw(mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE))) {
+ ALOGE("initRaw failed. Not taking picture.");
+ mSnapshotThreadWaitLock.unlock();
+ return UNKNOWN_ERROR;
+ }
+ }
+ } else if(mSnapshotFormat == PICTURE_FORMAT_RAW ){
+ if(!initRawSnapshot()){
+ ALOGE("initRawSnapshot failed. Not taking picture.");
+ mSnapshotThreadWaitLock.unlock();
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ // Arm the shutter notification for the upcoming capture.
+ mShutterLock.lock();
+ mShutterPending = true;
+ mShutterLock.unlock();
+
+ mSnapshotCancelLock.lock();
+ mSnapshotCancel = false;
+ mSnapshotCancelLock.unlock();
+
+ numJpegReceived = 0;
+ // Detached worker: completion is signalled via mSnapshotThreadWait.
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+ mSnapshotThreadRunning = !pthread_create(&mSnapshotThread,
+ &attr,
+ snapshot_thread,
+ NULL);
+ mSnapshotThreadWaitLock.unlock();
+
+ mInSnapshotModeWaitLock.lock();
+ mInSnapshotMode = true;
+ mInSnapshotModeWaitLock.unlock();
+
+ ALOGV("takePicture: X");
+ return mSnapshotThreadRunning ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+// Refresh the EXIF tags (GPS plus original date/time) attached to a live
+// snapshot JPEG.
+void QualcommCameraHardware::set_liveshot_exifinfo()
+{
+
+ setGpsParameters();
+ //set TimeStamp
+ const char *str = mParameters.get(QCameraParameters::KEY_EXIF_DATETIME);
+ if(str != NULL) {
+ // EXIF DateTime is exactly 19 characters ("YYYY:MM:DD HH:MM:SS");
+ // copy at most that many bytes and force NUL termination, since
+ // strncpy does not terminate when the source is longer.
+ strncpy(dateTime, str, 19);
+ dateTime[19] = '\0';
+ addExifTag(EXIFTAGID_EXIF_DATE_TIME_ORIGINAL, EXIF_ASCII,
+ 20, 1, (void *)dateTime);
+ }
+}
+
+
+// Trigger a snapshot while video recording is active ("live shot").
+// Allocates the JPEG destination buffer, pushes EXIF/size parameters to the
+// camera library, and (on 7630/8660) starts the liveshot ioctl. Returns
+// NO_ERROR for benign no-op cases, UNKNOWN_ERROR on hard failures.
+status_t QualcommCameraHardware::takeLiveSnapshotInternal()
+{
+ ALOGV("takeLiveSnapshotInternal : E");
+ if(liveshot_state == LIVESHOT_IN_PROGRESS || !mRecordingState) {
+ return NO_ERROR;
+ }
+
+ if( (mCurrentTarget != TARGET_MSM7630) && (mCurrentTarget != TARGET_MSM8660) && (mCurrentTarget != TARGET_MSM7627A)) {
+ ALOGI("LiveSnapshot not supported on this target");
+ liveshot_state = LIVESHOT_STOPPED;
+ return NO_ERROR;
+ }
+
+ liveshot_state = LIVESHOT_IN_PROGRESS;
+
+ if (!initLiveSnapshot(videoWidth, videoHeight)) {
+ ALOGE("takeLiveSnapshot: Jpeg Heap Memory allocation failed. Not taking Live Snapshot.");
+ liveshot_state = LIVESHOT_STOPPED;
+ return UNKNOWN_ERROR;
+ }
+ // Worst-case JPEG size: one uncompressed YUV420 frame (w * h * 1.5).
+ uint32_t maxjpegsize = videoWidth * videoHeight *1.5;
+ set_liveshot_exifinfo();
+ if(!LINK_set_liveshot_params(videoWidth, videoHeight,
+ exif_data, exif_table_numEntries,
+ (uint8_t *)mJpegLiveSnapMapped->data, maxjpegsize)) {
+ ALOGE("Link_set_liveshot_params failed.");
+ // BUG FIX: reset the state machine here. Previously the state stayed
+ // LIVESHOT_IN_PROGRESS, so every later live snapshot request was a
+ // silent no-op for the rest of the recording session.
+ liveshot_state = LIVESHOT_STOPPED;
+ if (NULL != mJpegLiveSnapMapped) {
+ ALOGV("initLiveSnapshot: clearing old mJpegHeap.");
+ mJpegLiveSnapMapped->release(mJpegLiveSnapMapped);
+ mJpegLiveSnapMapped = NULL;
+ }
+ return NO_ERROR;
+ }
+ if((mCurrentTarget == TARGET_MSM7630) || (mCurrentTarget == TARGET_MSM8660)) {
+ // 7627A feeds liveshot frames from the preview path instead (see
+ // receivePreviewFrame), so no explicit start ioctl is issued there.
+ if(!native_start_ops(CAMERA_OPS_LIVESHOT, NULL)) {
+ ALOGE("start_liveshot ioctl failed");
+ liveshot_state = LIVESHOT_STOPPED;
+ if (NULL != mJpegLiveSnapMapped) {
+ ALOGV("initLiveSnapshot: clearing old mJpegHeap.");
+ mJpegLiveSnapMapped->release(mJpegLiveSnapMapped);
+ mJpegLiveSnapMapped = NULL;
+ }
+ return UNKNOWN_ERROR;
+ }
+ }
+
+ ALOGV("takeLiveSnapshotInternal: X");
+ return NO_ERROR;
+}
+
+// Public live-snapshot entry point: serialize against other HAL calls via
+// mLock, then delegate to takeLiveSnapshotInternal.
+status_t QualcommCameraHardware::takeLiveSnapshot()
+{
+ ALOGV("takeLiveSnapshot: E ");
+ Mutex::Autolock l(&mLock);
+ // NOTE(review): the exit log fires before the internal call completes.
+ ALOGV("takeLiveSnapshot: X ");
+ return takeLiveSnapshotInternal( );
+}
+
+// Allocate (or re-allocate) the callback-heap buffer that receives the JPEG
+// produced by a live snapshot. Returns false on allocation failure.
+bool QualcommCameraHardware::initLiveSnapshot(int videowidth, int videoheight)
+{
+ ALOGV("initLiveSnapshot E");
+
+ // Drop any buffer left over from a previous live snapshot.
+ if (NULL != mJpegLiveSnapMapped) {
+ ALOGV("initLiveSnapshot: clearing old mJpegHeap.");
+ mJpegLiveSnapMapped->release(mJpegLiveSnapMapped);
+ mJpegLiveSnapMapped = NULL;
+ }
+
+ // Worst-case JPEG size: one uncompressed YUV420 frame (w * h * 1.5).
+ mJpegMaxSize = videowidth * videoheight * 1.5;
+ ALOGV("initLiveSnapshot: initializing mJpegHeap.");
+ mJpegLiveSnapMapped = mGetMemory(-1, mJpegMaxSize,1,mCallbackCookie);
+ if(mJpegLiveSnapMapped == NULL) {
+ // BUG FIX: error log previously misspelled the member as
+ // "mJpegLibeSnapMapped", which made grepping logs against the
+ // source fail.
+ ALOGE("Failed to get camera memory for mJpegLiveSnapMapped" );
+ return false;
+ }
+ ALOGV("initLiveSnapshot X");
+ return true;
+}
+
+
+// Cancel an in-flight still capture: raise the cancel flag for the snapshot
+// worker, then stop the capture operation in the camera library.
+status_t QualcommCameraHardware::cancelPicture()
+{
+ status_t rc;
+ ALOGV("cancelPicture: E");
+
+ mSnapshotCancelLock.lock();
+ ALOGI("%s: setting mSnapshotCancel to true", __FUNCTION__);
+ mSnapshotCancel = true;
+ mSnapshotCancelLock.unlock();
+
+ // On the 7627-family targets, block until the snapshot worker has fully
+ // drained before issuing the stop ioctl.
+ // NOTE(review): the reason this wait is target-specific is not visible
+ // here -- presumably a driver constraint on those chips; confirm.
+ if (mCurrentTarget == TARGET_MSM7627 ||
+ (mCurrentTarget == TARGET_MSM7625A ||
+ mCurrentTarget == TARGET_MSM7627A)) {
+ mSnapshotDone = TRUE;
+ mSnapshotThreadWaitLock.lock();
+ while (mSnapshotThreadRunning) {
+ ALOGV("cancelPicture: waiting for snapshot thread to complete.");
+ mSnapshotThreadWait.wait(mSnapshotThreadWaitLock);
+ ALOGV("cancelPicture: snapshot thread completed.");
+ }
+ mSnapshotThreadWaitLock.unlock();
+ }
+ rc = native_stop_ops(CAMERA_OPS_CAPTURE, NULL) ? NO_ERROR : UNKNOWN_ERROR;
+ mSnapshotDone = FALSE;
+ ALOGV("cancelPicture: X: %d", rc);
+ return rc;
+}
+
+// Apply client-supplied parameters. Every setter runs even after a failure;
+// the first failing status is remembered in final_rc and returned at the end.
+// While a snapshot worker is active, only capture geometry/quality keys may
+// be updated.
+// BUG FIX: the entry log previously contained mojibake ("¶ms") where
+// "&params" was intended, which does not compile.
+// NOTE(review): duplicate invocations of setRecordSize (x3), setPictureFormat
+// (x2) and setPreviewFormat (x2) were collapsed to their first occurrence;
+// each setter only re-reads the same immutable 'params', so repeats were
+// redundant -- confirm no setter relies on being called twice.
+status_t QualcommCameraHardware::setParameters(const QCameraParameters& params)
+{
+ ALOGV("setParameters: E params = %p", &params);
+
+ Mutex::Autolock l(&mLock);
+ Mutex::Autolock pl(&mParametersLock);
+ status_t rc, final_rc = NO_ERROR;
+ if (mSnapshotThreadRunning) {
+ // Snapshot in progress: restrict updates to size/quality keys.
+ if ((rc = setCameraMode(params))) final_rc = rc;
+ if ((rc = setPreviewSize(params))) final_rc = rc;
+ if ((rc = setRecordSize(params))) final_rc = rc;
+ if ((rc = setPictureSize(params))) final_rc = rc;
+ if ((rc = setJpegThumbnailSize(params))) final_rc = rc;
+ if ((rc = setJpegQuality(params))) final_rc = rc;
+ return final_rc;
+ }
+ if ((rc = setCameraMode(params))) final_rc = rc;
+ if ((rc = setPreviewSize(params))) final_rc = rc;
+ if ((rc = setRecordSize(params))) final_rc = rc;
+ if ((rc = setPictureSize(params))) final_rc = rc;
+ if ((rc = setJpegThumbnailSize(params))) final_rc = rc;
+ if ((rc = setJpegQuality(params))) final_rc = rc;
+ if ((rc = setPictureFormat(params))) final_rc = rc;
+ if ((rc = setPreviewFormat(params))) final_rc = rc;
+ if ((rc = setEffect(params))) final_rc = rc;
+ if ((rc = setGpsLocation(params))) final_rc = rc;
+ if ((rc = setRotation(params))) final_rc = rc;
+ if ((rc = setZoom(params))) final_rc = rc;
+ if ((rc = setOrientation(params))) final_rc = rc;
+ if ((rc = setLensshadeValue(params))) final_rc = rc;
+ if ((rc = setMCEValue(params))) final_rc = rc;
+ //if ((rc = setHDRImaging(params))) final_rc = rc;
+ if ((rc = setExpBracketing(params))) final_rc = rc;
+ if ((rc = setSharpness(params))) final_rc = rc;
+ if ((rc = setSaturation(params))) final_rc = rc;
+ if ((rc = setTouchAfAec(params))) final_rc = rc;
+ if ((rc = setSceneMode(params))) final_rc = rc;
+ if ((rc = setContrast(params))) final_rc = rc;
+ if ((rc = setSceneDetect(params))) final_rc = rc;
+ if ((rc = setStrTextures(params))) final_rc = rc;
+ if ((rc = setSkinToneEnhancement(params))) final_rc = rc;
+ if ((rc = setAntibanding(params))) final_rc = rc;
+ if ((rc = setRedeyeReduction(params))) final_rc = rc;
+ if ((rc = setDenoise(params))) final_rc = rc;
+ if ((rc = setPreviewFpsRange(params))) final_rc = rc;
+ if ((rc = setZslParam(params))) final_rc = rc;
+ if ((rc = setSnapshotCount(params))) final_rc = rc;
+ if((rc = setRecordingHint(params))) final_rc = rc;
+ // Exposure/white-balance/flash/focus are only tunable when no best-shot
+ // scene mode overrides them.
+ const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+ int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+
+ if((value != NOT_FOUND) && (value == CAMERA_BESTSHOT_OFF)) {
+ if ((rc = setPreviewFrameRate(params))) final_rc = rc;
+ // if ((rc = setPreviewFrameRateMode(params))) final_rc = rc;
+ if ((rc = setAutoExposure(params))) final_rc = rc;
+ if ((rc = setExposureCompensation(params))) final_rc = rc;
+ if ((rc = setWhiteBalance(params))) final_rc = rc;
+ if ((rc = setFlash(params))) final_rc = rc;
+ if ((rc = setFocusMode(params))) final_rc = rc;
+ if ((rc = setBrightness(params))) final_rc = rc;
+ if ((rc = setISOValue(params))) final_rc = rc;
+ if ((rc = setFocusAreas(params))) final_rc = rc;
+ if ((rc = setMeteringAreas(params))) final_rc = rc;
+ }
+ //selectableZoneAF needs to be invoked after continuous AF
+ if ((rc = setSelectableZoneAf(params))) final_rc = rc;
+ // setHighFrameRate needs to be done at end, as there can
+ // be a preview restart, and need to use the updated parameters
+ if ((rc = setHighFrameRate(params))) final_rc = rc;
+
+ ALOGV("setParameters: X");
+ return final_rc;
+}
+
+// Return a copy of the current parameter set.
+// NOTE(review): reads mParameters without taking mParametersLock; racy if
+// invoked concurrently with setParameters -- confirm callers serialize.
+QCameraParameters QualcommCameraHardware::getParameters() const
+{
+ ALOGV("getParameters: EX");
+ return mParameters;
+}
+// Enable histogram statistics: allocate three shared stats buffers and tell
+// the camera library to start delivering histogram data.
+// BUG FIX: the allocation-failure path used to "return false", which in a
+// status_t-returning function equals 0 == NO_ERROR, i.e. the failure was
+// reported as success. It now returns UNKNOWN_ERROR, and buffers allocated
+// in earlier loop iterations are released instead of being leaked.
+status_t QualcommCameraHardware::setHistogramOn()
+{
+ ALOGV("setHistogramOn: EX");
+ mStatsWaitLock.lock();
+ mSendData = true;
+ if(mStatsOn == CAMERA_HISTOGRAM_ENABLE) {
+ mStatsWaitLock.unlock();
+ return NO_ERROR;
+ }
+#if 0
+ if (mStatHeap != NULL) {
+ ALOGV("setHistogram on: clearing old mStatHeap.");
+ mStatHeap.clear();
+ }
+#endif
+
+ mStatSize = sizeof(uint32_t)* HISTOGRAM_STATS_SIZE;
+ mCurrent = -1;
+ /*Currently the Ashmem is multiplying the buffer size with total number
+ of buffers and page aligning. This causes a crash in JNI as each buffer
+ individually expected to be page aligned */
+ int page_size_minus_1 = getpagesize() - 1;
+ int32_t mAlignedStatSize = ((mStatSize + page_size_minus_1) & (~page_size_minus_1));
+#if 0
+ mStatHeap =
+ new AshmemPool(mAlignedStatSize,
+ 3,
+ mStatSize,
+ "stat");
+ if (!mStatHeap->initialized()) {
+ ALOGE("Stat Heap X failed ");
+ mStatHeap.clear();
+ ALOGE("setHistogramOn X: error initializing mStatHeap");
+ mStatsWaitLock.unlock();
+ return UNKNOWN_ERROR;
+ }
+#endif
+ for(int cnt = 0; cnt<3; cnt++) {
+ mStatsMapped[cnt]=mGetMemory(-1, mStatSize,1,mCallbackCookie);
+ if(mStatsMapped[cnt] == NULL) {
+ ALOGE("Failed to get camera memory for stats heap index: %d", cnt);
+ // Undo the partial allocation so a later retry starts clean.
+ for(int i = 0; i < cnt; i++) {
+ mStatsMapped[i]->release(mStatsMapped[i]);
+ mStatsMapped[i] = NULL;
+ }
+ mStatsWaitLock.unlock();
+ return UNKNOWN_ERROR;
+ }else{
+ ALOGV("Received following info for stats mapped data:%p,handle:%p, size:%d,release:%p",
+ mStatsMapped[cnt]->data ,mStatsMapped[cnt]->handle, mStatsMapped[cnt]->size, mStatsMapped[cnt]->release);
+ }
+ }
+ mStatsOn = CAMERA_HISTOGRAM_ENABLE;
+ mStatsWaitLock.unlock();
+ mCfgControl.mm_camera_set_parm(CAMERA_PARM_HISTOGRAM, &mStatsOn);
+ return NO_ERROR;
+}
+
+// Disable histogram statistics: notify the camera library first, then free
+// the three shared stats buffers.
+status_t QualcommCameraHardware::setHistogramOff()
+{
+ ALOGV("setHistogramOff: EX");
+ mStatsWaitLock.lock();
+ if(mStatsOn == CAMERA_HISTOGRAM_DISABLE) {
+ mStatsWaitLock.unlock();
+ return NO_ERROR;
+ }
+ mStatsOn = CAMERA_HISTOGRAM_DISABLE;
+ mStatsWaitLock.unlock();
+
+ // The lock is dropped around the driver call so it is not held across IPC.
+ mCfgControl.mm_camera_set_parm(CAMERA_PARM_HISTOGRAM, &mStatsOn);
+
+ mStatsWaitLock.lock();
+// mStatHeap.clear();
+ for(int i=0; i<3; i++){
+ if(mStatsMapped[i] != NULL){
+ mStatsMapped[i]->release(mStatsMapped[i]);
+ mStatsMapped[i] = NULL;
+ }
+ }
+
+ mStatsWaitLock.unlock();
+ return NO_ERROR;
+}
+
+
+// Push the current face-detection parameter down to the driver.
+// NOTE(review): the entire implementation is compiled out (#if 0), so this
+// currently always returns BAD_VALUE and 'ret' is initialized but unused.
+status_t QualcommCameraHardware::runFaceDetection()
+{
+ bool ret = true;
+#if 0
+ const char *str = mParameters.get(QCameraParameters::KEY_FACE_DETECTION);
+ if (str != NULL) {
+ int value = attr_lookup(facedetection,
+ sizeof(facedetection) / sizeof(str_map), str);
+
+ mMetaDataWaitLock.lock();
+ if (value == true) {
+ // Allocate the metadata heap that carries face ROIs to the client.
+ if(mMetaDataHeap != NULL)
+ mMetaDataHeap.clear();
+
+ mMetaDataHeap =
+ new AshmemPool((sizeof(int)*(MAX_ROI*4+1)),
+ 1,
+ (sizeof(int)*(MAX_ROI*4+1)),
+ "metadata");
+ if (!mMetaDataHeap->initialized()) {
+ ALOGE("Meta Data Heap allocation failed ");
+ mMetaDataHeap.clear();
+ ALOGE("runFaceDetection X: error initializing mMetaDataHeap");
+ mMetaDataWaitLock.unlock();
+ return UNKNOWN_ERROR;
+ }
+ mSendMetaData = true;
+ } else {
+ if(mMetaDataHeap != NULL)
+ mMetaDataHeap.clear();
+ }
+ mMetaDataWaitLock.unlock();
+ ret = native_set_parms(CAMERA_PARM_FD, sizeof(int8_t), (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+ #endif
+ return BAD_VALUE;
+}
+
+// Entry point for the detached smooth-zoom worker; forwards to the HAL object.
+void* smoothzoom_thread(void* user)
+{
+ // call runsmoothzoomthread
+ ALOGV("smoothzoom_thread E");
+ CAMERA_HAL_UNUSED(user);
+
+ QualcommCameraHardware* hw = QualcommCameraHardware::getInstance();
+ if (hw == 0) {
+ ALOGE("not starting smooth zoom thread: the object went away!");
+ } else {
+ hw->runSmoothzoomThread(user);
+ }
+ ALOGV("Smoothzoom_thread X");
+ return NULL;
+}
+
+// Dispatch framework sendCommand() requests. Only the histogram commands are
+// live; face detection and smooth zoom are compiled out (#if 0) and fall
+// through to BAD_VALUE along with unknown commands.
+status_t QualcommCameraHardware::sendCommand(int32_t command, int32_t arg1,
+ int32_t arg2)
+{
+ ALOGV("sendCommand: EX");
+ CAMERA_HAL_UNUSED(arg1);
+ CAMERA_HAL_UNUSED(arg2);
+ Mutex::Autolock l(&mLock);
+
+ switch(command) {
+ case CAMERA_CMD_HISTOGRAM_ON:
+ ALOGV("histogram set to on");
+ return setHistogramOn();
+ case CAMERA_CMD_HISTOGRAM_OFF:
+ ALOGV("histogram set to off");
+ return setHistogramOff();
+ case CAMERA_CMD_HISTOGRAM_SEND_DATA:
+ // One-shot: arms delivery of the next histogram sample only.
+ mStatsWaitLock.lock();
+ if(mStatsOn == CAMERA_HISTOGRAM_ENABLE)
+ mSendData = true;
+ mStatsWaitLock.unlock();
+ return NO_ERROR;
+#if 0
+ case CAMERA_CMD_FACE_DETECTION_ON:
+ if(supportsFaceDetection() == false){
+ ALOGI("face detection support is not available");
+ return NO_ERROR;
+ }
+
+ setFaceDetection("on");
+ return runFaceDetection();
+ case CAMERA_CMD_FACE_DETECTION_OFF:
+ if(supportsFaceDetection() == false){
+ ALOGI("face detection support is not available");
+ return NO_ERROR;
+ }
+ setFaceDetection("off");
+ return runFaceDetection();
+ case CAMERA_CMD_SEND_META_DATA:
+ mMetaDataWaitLock.lock();
+ if(mFaceDetectOn == true) {
+ mSendMetaData = true;
+ }
+ mMetaDataWaitLock.unlock();
+ return NO_ERROR;
+ case CAMERA_CMD_START_SMOOTH_ZOOM :
+ ALOGV("HAL sendcmd start smooth zoom %d %d", arg1 , arg2);
+ mTargetSmoothZoom = arg1;
+ if(!mPreviewStopping) {
+ // create smooth zoom thread
+ mSmoothzoomThreadLock.lock();
+ mSmoothzoomThreadExit = false;
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+ pthread_create(&mSmoothzoomThread,
+ &attr,
+ smoothzoom_thread,
+ NULL);
+ mSmoothzoomThreadLock.unlock();
+ } else
+ ALOGV(" Not creating smooth zoom thread "
+ " since preview is stopping ");
+ mTargetSmoothZoom = arg1;
+ return NO_ERROR;
+
+ case CAMERA_CMD_STOP_SMOOTH_ZOOM :
+ mSmoothzoomThreadLock.lock();
+ mSmoothzoomThreadExit = true;
+ mSmoothzoomThreadLock.unlock();
+ ALOGV("HAL sendcmd stop smooth zoom");
+ return NO_ERROR;
+#endif
+ }
+ return BAD_VALUE;
+}
+
+// Body of the smooth-zoom worker: step the zoom level one unit per preview
+// frame toward mTargetSmoothZoom, issuing a CAMERA_MSG_ZOOM callback per
+// step (the final step carries a "done" flag of 1). Exits early when stop
+// is requested, the zoom range is exceeded, or preview is stopping.
+void QualcommCameraHardware::runSmoothzoomThread(void * data) {
+
+ ALOGV("runSmoothzoomThread: Current zoom %d - "
+ "Target %d", mParameters.getInt("zoom"), mTargetSmoothZoom);
+ int current_zoom = mParameters.getInt("zoom");
+ int step = (current_zoom > mTargetSmoothZoom)? -1: 1;
+
+ if(current_zoom == mTargetSmoothZoom) {
+ ALOGV("Smoothzoom target zoom value is same as "
+ "current zoom value, return...");
+ if(!mPreviewStopping)
+ mNotifyCallback(CAMERA_MSG_ZOOM,
+ current_zoom, 1, mCallbackCookie);
+ else
+ ALOGV("Not issuing callback since preview is stopping");
+ return;
+ }
+
+ QCameraParameters p = getParameters();
+
+ mSmoothzoomThreadWaitLock.lock();
+ mSmoothzoomThreadRunning = true;
+ mSmoothzoomThreadWaitLock.unlock();
+
+ int i = current_zoom;
+ while(1) { // Thread loop
+ // Honor a stop request (CAMERA_CMD_STOP_SMOOTH_ZOOM).
+ mSmoothzoomThreadLock.lock();
+ if(mSmoothzoomThreadExit) {
+ ALOGV("Exiting smoothzoom thread, as stop smoothzoom called");
+ mSmoothzoomThreadLock.unlock();
+ break;
+ }
+ mSmoothzoomThreadLock.unlock();
+
+ if((i < 0) || (i > mMaxZoom)) {
+ ALOGE(" ERROR : beyond supported zoom values, break..");
+ break;
+ }
+ // update zoom
+ p.set("zoom", i);
+ setZoom(p);
+ if(!mPreviewStopping) {
+ // give call back to zoom listener in app
+ mNotifyCallback(CAMERA_MSG_ZOOM, i, (mTargetSmoothZoom-i == 0)?1:0,
+ mCallbackCookie);
+ } else {
+ ALOGV("Preview is stopping. Breaking out of smooth zoom loop");
+ break;
+ }
+ if(i == mTargetSmoothZoom)
+ break;
+
+ i+=step;
+
+ /* wait on signal, which will be signalled on
+ * receiving next preview frame */
+ mSmoothzoomThreadWaitLock.lock();
+ mSmoothzoomThreadWait.wait(mSmoothzoomThreadWaitLock);
+ mSmoothzoomThreadWaitLock.unlock();
+ } // while loop over, exiting thread
+
+ mSmoothzoomThreadWaitLock.lock();
+ mSmoothzoomThreadRunning = false;
+ mSmoothzoomThreadWaitLock.unlock();
+ ALOGV("Exiting Smooth Zoom Thread");
+}
+
+// C entry point used by the HAL module glue: validate the camera id against
+// the enumerated sensor count, set the global current-camera state, and
+// create (or reuse) the hardware instance. Returns NULL for an invalid id.
+extern "C" QualcommCameraHardware* HAL_openCameraHardware(int cameraId)
+{
+ int i;
+ ALOGI("openCameraHardware: call createInstance");
+ // Effectively a bounds check: matches when 0 <= cameraId < HAL_numOfCameras.
+ for(i = 0; i < HAL_numOfCameras; i++) {
+ if(i == cameraId) {
+ ALOGI("openCameraHardware:Valid camera ID %d", cameraId);
+ parameter_string_initialized = false;
+ HAL_currentCameraId = cameraId;
+ /* The least significant two bits of mode parameter indicates the sensor mode
+ of 2D or 3D. The next two bits indicates the snapshot mode of
+ ZSL or NONZSL
+ */
+#if 0
+ int sensorModeMask = 0x03 & mode;
+ if(sensorModeMask & HAL_cameraInfo[i].modes_supported){
+ HAL_currentCameraMode = sensorModeMask;
+ }else{
+ ALOGE("openCameraHardware:Invalid camera mode (%d) requested", mode);
+ return NULL;
+ }
+#endif
+ // Mode negotiation above is compiled out; 2D/non-ZSL is forced.
+ HAL_currentCameraMode = CAMERA_MODE_2D;
+ HAL_currentSnapshotMode = CAMERA_SNAPSHOT_NONZSL;
+ //Remove values set by app other than supported values
+ //mode = mode & HAL_cameraInfo[cameraId].modes_supported;
+ //if((mode & CAMERA_SNAPSHOT_ZSL) == CAMERA_SNAPSHOT_ZSL)
+ // HAL_currentSnapshotMode = CAMERA_SNAPSHOT_ZSL;
+ ALOGI("%s: HAL_currentSnapshotMode = %d HAL_currentCameraMode = %d", __FUNCTION__, HAL_currentSnapshotMode,
+ HAL_currentCameraMode);
+ return QualcommCameraHardware::createInstance();
+ }
+ }
+ ALOGE("openCameraHardware:Invalid camera ID %d", cameraId);
+ return NULL;
+}
+
+//wp<QualcommCameraHardware> QualcommCameraHardware::singleton;
+
+// If the hardware already exists, return a strong pointer to the current
+// object. If not, create a new hardware object, put it in the singleton,
+// and return it.
+// Build and initialize a new QualcommCameraHardware instance, caching it in
+// the 'hardware' global. Returns NULL when the ONCRPC device is missing or
+// camera startup fails.
+// BUG FIX: the stat-failure path called singleton_lock.unlock() even though
+// the matching lock() is compiled out (#if 0 above); unlocking a mutex that
+// is not held is undefined behavior. The startCamera-failure path also left
+// the 'hardware' global dangling after deleting the instance.
+QualcommCameraHardware* QualcommCameraHardware::createInstance()
+{
+ ALOGV("createInstance: E");
+#if 0
+ singleton_lock.lock();
+
+ // Wait until the previous release is done.
+ while (singleton_releasing) {
+ if((singleton_releasing_start_time != 0) &&
+ (systemTime() - singleton_releasing_start_time) > SINGLETON_RELEASING_WAIT_TIME){
+ ALOGV("in createinstance system time is %lld %lld %lld ",
+ systemTime(), singleton_releasing_start_time, SINGLETON_RELEASING_WAIT_TIME);
+ singleton_lock.unlock();
+ ALOGE("Previous singleton is busy and time out exceeded. Returning null");
+ return NULL;
+ }
+ ALOGI("Wait for previous release.");
+ singleton_wait.waitRelative(singleton_lock, SINGLETON_RELEASING_RECHECK_TIMEOUT);
+ ALOGI("out of Wait for previous release.");
+ }
+
+ if (singleton != 0) {
+ sp<CameraHardwareInterface> hardware = singleton.promote();
+ if (hardware != 0) {
+ ALOGD("createInstance: X return existing hardware=%p", &(*hardware));
+ singleton_lock.unlock();
+ return hardware;
+ }
+ }
+#endif
+ {
+ // /dev/oncrpc carries the RPC link to the modem-side camera stack;
+ // without it the HAL cannot function.
+ struct stat st;
+ int rc = stat("/dev/oncrpc", &st);
+ if (rc < 0) {
+ ALOGD("createInstance: X failed to create hardware: %s", strerror(errno));
+ //singleton_lock.unlock();
+ return NULL;
+ }
+ }
+
+ QualcommCameraHardware *cam = new QualcommCameraHardware();
+ hardware=cam;
+
+
+ ALOGI("createInstance: created hardware=%p", cam);
+ if (!cam->startCamera()) {
+ ALOGE("%s: startCamera failed!", __FUNCTION__);
+ //singleton_lock.unlock();
+ delete cam;
+ // Clear the cached global so getInstance() cannot hand out a
+ // pointer to the instance we just deleted.
+ hardware = NULL;
+ return NULL;
+ }
+
+ cam->initDefaultParameters();
+ //singleton_lock.unlock();
+ ALOGV("createInstance: X");
+ return cam;
+}
+
+// For internal use only, hence the strong pointer to the derived type.
+// Return the cached hardware instance created by createInstance().
+QualcommCameraHardware* QualcommCameraHardware::getInstance()
+{
+ //QualcommCameraHardware* hardware = singleton.promote();
+ if (hardware != 0) {
+ // ALOGV("getInstance: X old instance of hardware");
+ // return sp<QualcommCameraHardware>(static_cast<QualcommCameraHardware*>(hardware.get()));
+ return hardware;
+ } else {
+ ALOGV("getInstance: X new instance of hardware");
+ // NOTE(review): this freshly constructed object is neither started via
+ // startCamera() nor stored in 'hardware', and nothing visible deletes
+ // it -- looks like a leak/half-initialized path; confirm whether any
+ // caller can actually reach it.
+ return new QualcommCameraHardware();
+ }
+}
+// Hand a just-filled video frame to the busy queue for the encoder path.
+void QualcommCameraHardware::receiveRecordingFrame(struct msm_frame *frame)
+{
+ ALOGV("receiveRecordingFrame E");
+ // post busy frame
+ if (frame == NULL) {
+ ALOGE("in receiveRecordingFrame frame is NULL");
+ } else {
+ cam_frame_post_video (frame);
+ }
+ ALOGV("receiveRecordingFrame X");
+}
+
+
+// Apply digital zoom by blitting the cropped preview region back to full
+// preview size through the MDP (MSMFB_BLIT ioctl). Source and destination
+// live in the same pmem fd at different offsets. Returns TRUE on success.
+bool QualcommCameraHardware::native_zoom_image(int fd, int srcOffset, int dstOffSet, common_crop_t *crop)
+{
+ int result = 0;
+ struct mdp_blit_req *e;
+
+ /* Initialize yuv structure */
+ zoomImage.list.count = 1;
+
+ e = &zoomImage.list.req[0];
+
+ e->src.width = previewWidth;
+ e->src.height = previewHeight;
+ e->src.format = MDP_Y_CBCR_H2V2;
+ e->src.offset = srcOffset;
+ e->src.memory_id = fd;
+
+ e->dst.width = previewWidth;
+ e->dst.height = previewHeight;
+ e->dst.format = MDP_Y_CBCR_H2V2;
+ e->dst.offset = dstOffSet;
+ e->dst.memory_id = fd;
+
+ e->transp_mask = 0xffffffff;
+ e->flags = 0;
+ e->alpha = 0xff;
+ if (crop->in1_w != 0 && crop->in1_h != 0) {
+ // Center the crop window inside the full output frame.
+ e->src_rect.x = (crop->out1_w - crop->in1_w + 1) / 2 - 1;
+ e->src_rect.y = (crop->out1_h - crop->in1_h + 1) / 2 - 1;
+ e->src_rect.w = crop->in1_w;
+ e->src_rect.h = crop->in1_h;
+ } else {
+ // No crop info: 1:1 copy of the whole preview frame.
+ e->src_rect.x = 0;
+ e->src_rect.y = 0;
+ e->src_rect.w = previewWidth;
+ e->src_rect.h = previewHeight;
+ }
+ //ALOGV(" native_zoom : SRC_RECT : x,y = %d,%d \t w,h = %d, %d",
+ // e->src_rect.x, e->src_rect.y, e->src_rect.w, e->src_rect.h);
+
+ e->dst_rect.x = 0;
+ e->dst_rect.y = 0;
+ e->dst_rect.w = previewWidth;
+ e->dst_rect.h = previewHeight;
+
+ result = ioctl(fb_fd, MSMFB_BLIT, &zoomImage.list);
+ if (result < 0) {
+ ALOGE("MSM_FBIOBLT failed! line=%d\n", __LINE__);
+ return FALSE;
+ }
+ return TRUE;
+}
+
+// Log the preview frame rate, recomputed at most once per 250 ms.
+void QualcommCameraHardware::debugShowPreviewFPS() const
+{
+ static int sFrameCount;
+ static int sLastFrameCount = 0;
+ static nsecs_t sLastFpsTime = 0;
+ static float sFps = 0;
+ sFrameCount++;
+ const nsecs_t now = systemTime();
+ const nsecs_t elapsed = now - sLastFpsTime;
+ if (elapsed <= ms2ns(250))
+ return;
+ sFps = ((sFrameCount - sLastFrameCount) * float(s2ns(1))) / elapsed;
+ ALOGI("Preview Frames Per Second: %.4f", sFps);
+ sLastFpsTime = now;
+ sLastFrameCount = sFrameCount;
+}
+
+// Log the video frame rate, recomputed at most once per 250 ms.
+void QualcommCameraHardware::debugShowVideoFPS() const
+{
+ static int sFrameCount;
+ static int sLastFrameCount = 0;
+ static nsecs_t sLastFpsTime = 0;
+ static float sFps = 0;
+ sFrameCount++;
+ const nsecs_t now = systemTime();
+ const nsecs_t elapsed = now - sLastFpsTime;
+ if (elapsed <= ms2ns(250))
+ return;
+ sFps = ((sFrameCount - sLastFrameCount) * float(s2ns(1))) / elapsed;
+ ALOGI("Video Frames Per Second: %.4f", sFps);
+ sLastFpsTime = now;
+ sLastFrameCount = sFrameCount;
+}
+
+// Callback invoked when the live-snapshot JPEG is ready: optionally dump it
+// to disk, deliver it to the client, then reset EXIF state.
+void QualcommCameraHardware::receiveLiveSnapshot(uint32_t jpeg_size)
+{
+ ALOGV("receiveLiveSnapshot E");
+#if DUMP_LIVESHOT_JPEG_FILE
+ int file_fd = open("/data/LiveSnapshot.jpg", O_RDWR | O_CREAT, 0777);
+ ALOGV("dumping live shot image in /data/LiveSnapshot.jpg");
+ if (file_fd < 0) {
+ ALOGE("cannot open file\n");
+ }
+ else
+ {
+ write(file_fd, (uint8_t *)mJpegLiveSnapMapped->data,jpeg_size);
+ }
+ // NOTE(review): close() runs even when open() failed (fd == -1);
+ // harmless beyond an EBADF errno, but worth tidying.
+ close(file_fd);
+#endif
+ Mutex::Autolock cbLock(&mCallbackLock);
+ if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE, mJpegLiveSnapMapped ,data_counter,
+ NULL, mCallbackCookie);
+
+ }
+ else ALOGV("JPEG callback was cancelled--not delivering image.");
+
+ //Reset the Gps Information & relieve memory
+ exif_table_numEntries = 0;
+ mJpegHeap.clear();
+
+ liveshot_state = LIVESHOT_DONE;
+
+ ALOGV("receiveLiveSnapshot X");
+}
+// Preview-frame callback from the camera driver thread: route the frame to
+// the preview worker queue, recycling it back to the driver when the camera
+// is stopped or the queue rejects it.
+void QualcommCameraHardware::receivePreviewFrame(struct msm_frame *frame)
+{
+ ALOGV("receivePreviewFrame E");
+ if (!mCameraRunning) {
+ ALOGI("ignoring preview callback--camera has been stopped");
+ // Return the buffer to the driver so it is not leaked.
+ LINK_camframe_add_frame(CAM_PREVIEW_FRAME,frame);
+ return;
+ }
+ // On 7627A the live snapshot is fed directly from preview frames.
+ if((mCurrentTarget == TARGET_MSM7627A) && ( liveshot_state == LIVESHOT_IN_PROGRESS)) {
+ LINK_set_liveshot_frame(frame);
+ }
+ // If the busy queue refuses the frame, hand it straight back to the driver.
+ if(mPreviewBusyQueue.add(frame) == false)
+ LINK_camframe_add_frame(CAM_PREVIEW_FRAME,frame);
+
+
+ ALOGV("receivePreviewFrame X");
+}
+// Histogram statistics callback from the driver: copy the sample into one of
+// the three rotating shared buffers and forward it to the client when stats
+// delivery is armed (mSendData, see CAMERA_CMD_HISTOGRAM_SEND_DATA).
+void QualcommCameraHardware::receiveCameraStats(camstats_type stype, camera_preview_histogram_info* histinfo)
+{
+ // ALOGV("receiveCameraStats E");
+ CAMERA_HAL_UNUSED(stype);
+
+ if (!mCameraRunning) {
+ ALOGE("ignoring stats callback--camera has been stopped");
+ return;
+ }
+
+ // Snapshot the callback state under its own lock so the client callback
+ // is later invoked without holding any HAL lock.
+ mCallbackLock.lock();
+ int msgEnabled = mMsgEnabled;
+ camera_data_callback scb = mDataCallback;
+ void *sdata = mCallbackCookie;
+ mCallbackLock.unlock();
+ mStatsWaitLock.lock();
+ if(mStatsOn == CAMERA_HISTOGRAM_DISABLE) {
+ mStatsWaitLock.unlock();
+ return;
+ }
+ if(!mSendData) {
+ mStatsWaitLock.unlock();
+ } else {
+ // One-shot delivery: consume the arm flag and rotate buffers.
+ mSendData = false;
+ mCurrent = (mCurrent+1)%3;
+ // The first element of the array will contain the maximum hist value provided by driver.
+ // *(uint32_t *)((unsigned int)mStatHeap->mHeap->base()+ (mStatHeap->mBufferSize * mCurrent)) = histinfo->max_value;
+ // memcpy((uint32_t *)((unsigned int)mStatHeap->mHeap->base()+ (mStatHeap->mBufferSize * mCurrent)+ sizeof(int32_t)), (uint32_t *)histinfo->buffer,(sizeof(int32_t) * 256));
+ *(uint32_t *)((unsigned int)(mStatsMapped[mCurrent]->data)) = histinfo->max_value;
+ memcpy((uint32_t *)((unsigned int)mStatsMapped[mCurrent]->data + sizeof(int32_t)), (uint32_t *)histinfo->buffer,(sizeof(int32_t) * 256));
+
+ mStatsWaitLock.unlock();
+
+ if (scb != NULL && (msgEnabled & CAMERA_MSG_STATS_DATA))
+ scb(CAMERA_MSG_STATS_DATA, mStatsMapped[mCurrent], data_counter, NULL,sdata);
+
+ }
+ // ALOGV("receiveCameraStats X");
+}
+/*===========================================================================
+ * FUNCTION - do_mmap -
+ *
+ * DESCRIPTION: returns the mmap'd virtual address
+ *==========================================================================*/
+// Map 'size' bytes of pmem (smipool on 8660, adsp elsewhere), rounding up to
+// a whole page. On success returns the mapping and stores the fd in *pmemFd;
+// on failure returns NULL.
+// BUG FIX: the open-failure log always blamed /dev/pmem_smipool even when
+// /dev/pmem_adsp was the device that failed; it now reports the device
+// actually opened.
+uint8_t *mm_camera_do_mmap(uint32_t size, int *pmemFd)
+{
+ void *ret; /* returned virtual address */
+ int pmem_fd;
+ const char *pmem_dev = (mCurrentTarget == TARGET_MSM8660) ?
+ "/dev/pmem_smipool" : "/dev/pmem_adsp";
+
+ pmem_fd = open(pmem_dev, O_RDWR|O_SYNC);
+ if (pmem_fd <= 0) {
+ ALOGE("do_mmap: Open device %s failed!\n", pmem_dev);
+ return NULL;
+ }
+ /* to make it page size aligned */
+ size = (size + 4095) & (~4095);
+ ret = mmap(NULL,
+ size,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ pmem_fd,
+ 0);
+ if (ret == MAP_FAILED) {
+ ALOGE("do_mmap: pmem mmap() failed: %s (%d)\n", strerror(errno), errno);
+ close(pmem_fd);
+ return NULL;
+ }
+ ALOGI("do_mmap: pmem mmap fd %d ptr %p len %u\n", pmem_fd, ret, size);
+ *pmemFd = pmem_fd;
+ return(uint8_t *)ret;
+}
+
+
+/*
+ * Allocates and registers the video (record) buffers with the driver.
+ * Skipped entirely in ZSL mode. Buffer geometry is target dependent: 8660
+ * pads luma and chroma planes to 2K for its encoder, other targets pad the
+ * chroma offset to a word. Each buffer is ION- or pmem-backed, wrapped via
+ * mGetMemory for the client callback path, and registered with the kernel.
+ * Finally the free camframe queue is (re)populated. Returns true on success.
+ */
+bool QualcommCameraHardware::initRecord()
+{
+    const char *pmem_region;
+    int ion_heap = ION_CP_MM_HEAP_ID;
+    int CbCrOffset;
+    int recordBufferSize;
+    int active, type =0;
+
+    ALOGV("initREcord E");
+    if(mZslEnable){
+        ALOGV("initRecord X.. Not intializing Record buffers in ZSL mode");
+        return true;
+    }
+
+    if(mCurrentTarget == TARGET_MSM8660) {
+        pmem_region = "/dev/pmem_smipool";
+    } else {
+        pmem_region = "/dev/pmem_adsp";
+    }
+
+    ALOGI("initRecord: mDimension.video_width = %d mDimension.video_height = %d",
+        mDimension.video_width, mDimension.video_height);
+    // for 8x60 the Encoder expects the CbCr offset should be aligned to 2K.
+    if(mCurrentTarget == TARGET_MSM8660) {
+        CbCrOffset = PAD_TO_2K(mDimension.video_width * mDimension.video_height);
+        recordBufferSize = CbCrOffset + PAD_TO_2K((mDimension.video_width * mDimension.video_height)/2);
+    } else {
+        CbCrOffset = PAD_TO_WORD(mDimension.video_width * mDimension.video_height);
+        recordBufferSize = (mDimension.video_width * mDimension.video_height *3)/2;
+    }
+
+    /* Buffersize and frameSize will be different when DIS is ON.
+     * We need to pass the actual framesize with video heap, as the same
+     * is used at camera MIO when negotiating with encoder.
+     */
+    mRecordFrameSize = PAD_TO_4K(recordBufferSize);
+    bool dis_disable = 0;
+    const char *str = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if((str != NULL) && (strcmp(str, QCameraParameters::VIDEO_HFR_OFF))) {
+        ALOGI("%s: HFR is ON, DIS has to be OFF", __FUNCTION__);
+        dis_disable = 1;
+    }
+    // With DIS (and not HFR) or 3D mode, the frame size is recomputed from
+    // the requested video dimensions rather than the driver dimensions.
+    if((mVpeEnabled && mDisEnabled && (!dis_disable))|| mIs3DModeOn){
+        mRecordFrameSize = videoWidth * videoHeight * 3 / 2;
+        if(mCurrentTarget == TARGET_MSM8660){
+            mRecordFrameSize = PAD_TO_4K(PAD_TO_2K(videoWidth * videoHeight)
+                                + PAD_TO_2K((videoWidth * videoHeight)/2));
+        }
+    }
+    ALOGV("mRecordFrameSize = %d", mRecordFrameSize);
+    //if(mRecordHeap == NULL) {
+    // Dead legacy heap-pool allocation path, compiled out.
+    #if 0
+#ifdef USE_ION
+    mRecordHeap = new IonPool(ion_heap,
+                        MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                        MSM_PMEM_VIDEO,
+                        recordBufferSize,
+                        kRecordBufferCount,
+                        mRecordFrameSize,
+                        CbCrOffset,
+                        0,
+                        "record");
+#endif
+
+    mRecordHeap = new PmemPool(pmem_region,
+                        MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+                        MSM_PMEM_VIDEO,
+                        recordBufferSize,
+                        kRecordBufferCount,
+                        mRecordFrameSize,
+                        CbCrOffset,
+                        0,
+                        "record");
+
+    if (!mRecordHeap->initialized()) {
+        mRecordHeap.clear();
+        mRecordHeap = NULL;
+        ALOGE("initRecord X: could not initialize record heap.");
+        return false;
+    }
+
+    } else {
+        if(mHFRMode == true) {
+            ALOGI("%s: register record buffers with camera driver", __FUNCTION__);
+            register_record_buffers(true);
+            mHFRMode = false;
+        }
+    }
+#endif
+
+    for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+#if 0
+        //recordframes[cnt].fd = mRecordHeap->mHeap->getHeapID();
+        recordframes[cnt].buffer = (unsigned long)mm_camera_do_mmap(mRecordFrameSize, &(recordframes[cnt].fd));
+        //(uint32_t)mRecordHeap->mHeap->base() + mRecordHeap->mAlignedBufferSize * cnt;
+        if(!recordframes[cnt].buffer)
+        {
+            ALOGE("Buffer allocation for record fram %d failed",cnt);
+            return false;
+        }
+#endif
+#ifdef USE_ION
+        // NOTE(review): this is a bool function but the two error paths
+        // below 'return NULL' (which converts to false) -- works, but they
+        // should read 'return false'.
+        if (allocate_ion_memory(&record_main_ion_fd[cnt], &record_alloc[cnt], &record_ion_info_fd[cnt],
+                                ion_heap, mRecordFrameSize, &mRecordfd[cnt]) < 0){
+            ALOGE("do_mmap: Open device %s failed!\n",pmem_region);
+            return NULL;
+        }
+#else
+        mRecordfd[cnt] = open(pmem_region, O_RDWR|O_SYNC);
+        if (mRecordfd[cnt] <= 0) {
+            ALOGE("%s: Open device %s failed!\n",__func__, pmem_region);
+            return NULL;
+        }
+#endif
+        ALOGI("%s Record fd is %d ", __func__, mRecordfd[cnt]);
+        // Wrap the fd in a camera_memory_t so frames can be handed to the
+        // client callback without copying.
+        mRecordMapped[cnt]=mGetMemory(mRecordfd[cnt], mRecordFrameSize,1,mCallbackCookie);
+        if(mRecordMapped[cnt]==NULL) {
+            ALOGE("Failed to get camera memory for mRecordMapped heap");
+        }else{
+            ALOGI("Received following info for record mapped data:%p,handle:%p, size:%d,release:%p",
+                mRecordMapped[cnt]->data ,mRecordMapped[cnt]->handle, mRecordMapped[cnt]->size, mRecordMapped[cnt]->release);
+        }
+#if 1
+        recordframes[cnt].buffer = (unsigned int)mRecordMapped[cnt]->data;
+        recordframes[cnt].fd = mRecordfd[cnt];
+#endif
+        recordframes[cnt].planar0_off = 0;
+        recordframes[cnt].planar1_off = CbCrOffset;
+        recordframes[cnt].planar2_off = 0;
+        recordframes[cnt].path = OUTPUT_TYPE_V;
+        record_buffers_tracking_flag[cnt] = false;
+        ALOGV ("initRecord : record heap , video buffers buffer=%lu fd=%d y_off=%d cbcr_off=%d \n",
+            (unsigned long)recordframes[cnt].buffer, recordframes[cnt].fd, recordframes[cnt].planar0_off,
+            recordframes[cnt].planar1_off);
+        // Only the first ACTIVE_VIDEO_BUFFERS are marked active; the last
+        // buffer is reserved for VPE (and forced active) when VPE is on.
+        active=(cnt<ACTIVE_VIDEO_BUFFERS);
+        type = MSM_PMEM_VIDEO;
+        if((mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+            type = MSM_PMEM_VIDEO_VPE;
+            active = 1;
+        }
+        ALOGI("Registering buffer %d with kernel",cnt);
+        register_buf(mRecordFrameSize,
+                    mRecordFrameSize, CbCrOffset, 0,
+                    recordframes[cnt].fd,
+                    0,
+                    (uint8_t *)recordframes[cnt].buffer,
+                    type,
+                    active);
+        ALOGI("Came back from register call to kernel");
+    }
+
+    // initial setup : buffers 1,2,3 with kernel , 4 with camframe , 5,6,7,8 in free Q
+    // flush the busy Q
+    cam_frame_flush_video();
+
+    // Wait for any video thread from a previous session to finish before
+    // repopulating the queues.
+    mVideoThreadWaitLock.lock();
+    while (mVideoThreadRunning) {
+        ALOGV("initRecord: waiting for old video thread to complete.");
+        mVideoThreadWait.wait(mVideoThreadWaitLock);
+        ALOGV("initRecord : old video thread completed.");
+    }
+    mVideoThreadWaitLock.unlock();
+
+    // flush free queue and add 5,6,7,8 buffers.
+    LINK_camframe_release_all_frames(CAM_VIDEO_FRAME);
+    if(mVpeEnabled) {
+        //If VPE is enabled, the VPE buffer shouldn't be added to Free Q initally.
+        for(int i=ACTIVE_VIDEO_BUFFERS;i <kRecordBufferCount-1; i++)
+            LINK_camframe_add_frame(CAM_VIDEO_FRAME,&recordframes[i]);
+    } else {
+        for(int i=ACTIVE_VIDEO_BUFFERS;i <kRecordBufferCount; i++)
+            LINK_camframe_add_frame(CAM_VIDEO_FRAME,&recordframes[i]);
+    }
+    ALOGV("initREcord X");
+
+    return true;
+}
+
+
+/*
+ * Pushes the digital image stabilization (DIS) setting to the driver.
+ * DIS is forced off while HFR recording is active. The output chroma
+ * offset is 2K-aligned on 8660 and word-aligned elsewhere, matching the
+ * record-buffer layout chosen in initRecord().
+ * Returns NO_ERROR on success, UNKNOWN_ERROR otherwise.
+ */
+status_t QualcommCameraHardware::setDIS() {
+    ALOGV("setDIS E");
+
+    video_dis_param_ctrl_t disCtrl;
+    bool ret = true;
+    ALOGV("mDisEnabled = %d", mDisEnabled);
+
+    int video_frame_cbcroffset;
+    video_frame_cbcroffset = PAD_TO_WORD(videoWidth * videoHeight);
+    if(mCurrentTarget == TARGET_MSM8660)
+        video_frame_cbcroffset = PAD_TO_2K(videoWidth * videoHeight);
+
+    disCtrl.dis_enable = mDisEnabled;
+    // HFR and DIS are mutually exclusive; HFR wins.
+    const char *str = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if((str != NULL) && (strcmp(str, QCameraParameters::VIDEO_HFR_OFF))) {
+        ALOGI("%s: HFR is ON, setting DIS as OFF", __FUNCTION__);
+        disCtrl.dis_enable = 0;
+    }
+    disCtrl.video_rec_width = videoWidth;
+    disCtrl.video_rec_height = videoHeight;
+    disCtrl.output_cbcr_offset = video_frame_cbcroffset;
+
+    ret = native_set_parms( CAMERA_PARM_VIDEO_DIS,
+                       sizeof(disCtrl), &disCtrl);
+
+    ALOGV("setDIS X (%d)", ret);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+/*
+ * Programs the VPE rotation used for recording. The effective rotation is
+ * the client-requested rotation combined with the sensor mounting rotation
+ * (modulo 360). 720p/WVGA at 90 or 270 degrees is forced to ROT_NONE
+ * because the video core cannot handle output heights above 720 (temporary
+ * hack, see inline comment). Returns NO_ERROR or UNKNOWN_ERROR.
+ */
+status_t QualcommCameraHardware::setVpeParameters()
+{
+    ALOGV("setVpeParameters E");
+
+    video_rotation_param_ctrl_t rotCtrl;
+    bool ret = true;
+    ALOGV("videoWidth = %d, videoHeight = %d", videoWidth, videoHeight);
+    int rotation = (mRotation + sensor_rotation)%360;
+    // Map the 0/90/180/270 degree rotation onto the driver enum.
+    rotCtrl.rotation = (rotation == 0) ? ROT_NONE :
+                       ((rotation == 90) ? ROT_CLOCKWISE_90 :
+                  ((rotation == 180) ? ROT_CLOCKWISE_180 : ROT_CLOCKWISE_270));
+
+    if( ((videoWidth == 1280 && videoHeight == 720) || (videoWidth == 800 && videoHeight == 480))
+        && (rotation == 90 || rotation == 270) ){
+        /* Due to a limitation at video core to support heights greater than 720, adding this check.
+         * This is a temporary hack, need to be removed once video core support is available
+         */
+        ALOGI("video resolution (%dx%d) with rotation (%d) is not supported, setting rotation to NONE",
+            videoWidth, videoHeight, rotation);
+        rotCtrl.rotation = ROT_NONE;
+    }
+    ALOGV("rotCtrl.rotation = %d", rotCtrl.rotation);
+
+    ret = native_set_parms(CAMERA_PARM_VIDEO_ROT,
+                           sizeof(rotCtrl), &rotCtrl);
+
+    ALOGV("setVpeParameters X (%d)", ret);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+/*
+ * Client entry point to start video recording. Starts (or re-uses) the
+ * preview stream, applies VPE rotation parameters, builds per-buffer
+ * metadata packets when metadata-in-frame mode is on, flushes frames left
+ * over from a previous session, and spawns the video thread on dual-VFE
+ * targets (7x30/QSD8250/8660). Returns the status of startPreviewInternal()
+ * (or the VPE failure indicator).
+ */
+status_t QualcommCameraHardware::startRecording()
+{
+    ALOGV("startRecording E");
+    int ret;
+    Mutex::Autolock l(&mLock);
+    mReleasedRecordingFrame = false;
+    if( (ret=startPreviewInternal())== NO_ERROR){
+        if(mVpeEnabled){
+            ALOGI("startRecording: VPE enabled, setting vpe parameters");
+            // NOTE(review): setVpeParameters() returns status_t but is
+            // narrowed to bool here, so on failure the caller receives 1
+            // instead of the real error code -- confirm and fix.
+            bool status = setVpeParameters();
+            if(status) {
+                ALOGE("Failed to set VPE parameters");
+                return status;
+            }
+        }
+        if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) ||
+            (mCurrentTarget == TARGET_MSM8660)) {
+            // Metadata mode: hand the encoder a native handle describing
+            // the pmem/ION buffer (fd, offset, size) instead of raw bytes.
+            for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+                if(mStoreMetaDataInFrame)
+                {
+                    ALOGI("startRecording : meta data mode enabled");
+                    metadata_memory[cnt] = mGetMemory(-1, sizeof(struct encoder_media_buffer_type), 1, mCallbackCookie);
+                    struct encoder_media_buffer_type * packet =
+                        (struct encoder_media_buffer_type *)metadata_memory[cnt]->data;
+                    packet->meta_handle = native_handle_create(1, 2); //1 fd, 1 offset and 1 size
+                    packet->buffer_type = kMetadataBufferTypeCameraSource;
+                    native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+                    nh->data[0] = mRecordfd[cnt];
+                    nh->data[1] = 0;
+                    nh->data[2] = mRecordFrameSize;
+                }
+            }
+            ALOGV(" in startREcording : calling start_recording");
+            native_start_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+            mRecordingState = 1;
+            // Remove the left out frames in busy Q and them in free Q.
+            // this should be done before starting video_thread so that,
+            // frames in previous recording are flushed out.
+            ALOGV("frames in busy Q = %d", g_busy_frame_queue.num_of_frames);
+            while((g_busy_frame_queue.num_of_frames) >0){
+                msm_frame* vframe = cam_frame_get_video ();
+                LINK_camframe_add_frame(CAM_VIDEO_FRAME,vframe);
+            }
+            ALOGV("frames in busy Q = %d after deQueing", g_busy_frame_queue.num_of_frames);
+            //Clear the dangling buffers and put them in free queue
+            for(int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+                if(record_buffers_tracking_flag[cnt] == true) {
+                    ALOGI("Dangling buffer: offset = %d, buffer = %d", cnt,
+                        (unsigned int)recordframes[cnt].buffer);
+                    LINK_camframe_add_frame(CAM_VIDEO_FRAME,&recordframes[cnt]);
+                    record_buffers_tracking_flag[cnt] = false;
+                }
+            }
+            mVideoThreadWaitLock.lock();
+            mVideoThreadExit = 0;
+            pthread_attr_t attr;
+            pthread_attr_init(&attr);
+            pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+            // NOTE(review): unlike startRecordingInternal(), the result of
+            // pthread_create() (0 on success) is stored WITHOUT negation,
+            // so mVideoThreadRunning is false here on success; presumably
+            // video_thread marks itself running -- confirm before changing.
+            mVideoThreadRunning = pthread_create(&mVideoThread,
+                                                 &attr,
+                                                 video_thread,
+                                                 NULL);
+            mVideoThreadWaitLock.unlock();
+        } else if ( mCurrentTarget == TARGET_MSM7627A ) {
+            // Single-VFE 7x27A: preview buffers double as video buffers, so
+            // metadata packets wrap the preview mappings instead.
+            for (int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+                if(mStoreMetaDataInFrame
+                    && (metadata_memory[cnt] == NULL))
+                {
+                    ALOGI("startRecording : meta data mode enabled filling metadata memory ");
+                    metadata_memory[cnt] = mGetMemory(-1, sizeof(struct encoder_media_buffer_type), 1, mCallbackCookie);
+                    struct encoder_media_buffer_type * packet =
+                        (struct encoder_media_buffer_type *)metadata_memory[cnt]->data;
+                    packet->meta_handle = native_handle_create(1, 3); //1 fd, 1 offset and 1 size
+                    packet->buffer_type = kMetadataBufferTypeCameraSource;
+                    native_handle_t * nh = const_cast<native_handle_t *>(packet->meta_handle);
+                    nh->data[0] = frames[cnt].fd;
+                    nh->data[1] = 0;
+                    nh->data[2] = previewWidth * previewHeight * 3/2;
+                    nh->data[3] = (unsigned int)mPreviewMapped[cnt]->data;
+                }
+            }
+        }
+        record_flag = 1;
+    }
+    return ret;
+}
+
+/*
+ * Internal recording start: flushes stale video frames back to the free
+ * queue, asks the driver to start recording (skipped in 3D mode, where the
+ * stream already runs as part of preview) and spawns video_thread on
+ * dual-VFE targets. Idempotent while the video thread is already running.
+ * Returns NO_ERROR on success or the failure status from VPE setup.
+ */
+status_t QualcommCameraHardware::startRecordingInternal()
+{
+    ALOGV("%s: E", __FUNCTION__);
+    mReleasedRecordingFrame = false;
+
+    /* In 3D mode, the video thread has to be started as part
+     * of preview itself, because video buffers and video callback
+     * need to be used for both display and encoding.
+     * startRecordingInternal() will be called as part of startPreview().
+     * This check is needed to support both 3D and non-3D mode.
+     */
+    if(mVideoThreadRunning) {
+        ALOGI("Video Thread is in progress");
+        return NO_ERROR;
+    }
+
+    if(mVpeEnabled){
+        ALOGI("startRecording: VPE enabled, setting vpe parameters");
+        /* Keep the real status_t: the old code narrowed it to bool, so the
+         * caller saw 1 instead of the actual error code on failure. */
+        status_t status = setVpeParameters();
+        if(status != NO_ERROR) {
+            ALOGE("Failed to set VPE parameters");
+            return status;
+        }
+    }
+    if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+        // Remove the left out frames in busy Q and them in free Q.
+        // this should be done before starting video_thread so that,
+        // frames in previous recording are flushed out.
+        ALOGV("frames in busy Q = %d", g_busy_frame_queue.num_of_frames);
+        while((g_busy_frame_queue.num_of_frames) >0){
+            msm_frame* vframe = cam_frame_get_video ();
+            LINK_camframe_add_frame(CAM_VIDEO_FRAME,vframe);
+        }
+        ALOGV("frames in busy Q = %d after deQueing", g_busy_frame_queue.num_of_frames);
+
+        //Clear the dangling buffers and put them in free queue
+        for(int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+            if(record_buffers_tracking_flag[cnt] == true) {
+                ALOGI("Dangling buffer: offset = %d, buffer = %d", cnt, (unsigned int)recordframes[cnt].buffer);
+                LINK_camframe_add_frame(CAM_VIDEO_FRAME,&recordframes[cnt]);
+                record_buffers_tracking_flag[cnt] = false;
+            }
+        }
+
+        ALOGI(" in startREcording : calling start_recording");
+        if(!mIs3DModeOn)
+            native_start_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+
+        // Start video thread and wait for busy frames to be encoded, this thread
+        // should be closed in stopRecording
+        mVideoThreadWaitLock.lock();
+        mVideoThreadExit = 0;
+        pthread_attr_t attr;
+        pthread_attr_init(&attr);
+        pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
+        /* pthread_create returns 0 on success, hence the negation. */
+        mVideoThreadRunning = !pthread_create(&mVideoThread,
+                                              &attr,
+                                              video_thread,
+                                              NULL);
+        mVideoThreadWaitLock.unlock();
+        // Remove the left out frames in busy Q and them in free Q.
+    }
+    /* Exit trace: the old code logged "%s: E" here too, making entry and
+     * exit indistinguishable in logcat. */
+    ALOGV("%s: X", __FUNCTION__);
+    return NO_ERROR;
+}
+
+/*
+ * Client entry point to stop video recording. Wakes any thread blocked in
+ * releaseRecordingFrame; if preview frames are still being delivered from
+ * the same stream, the shared pipeline is left running. Otherwise the video
+ * thread is told to exit (dual-VFE targets), the driver recording op is
+ * stopped and the per-buffer metadata packets are freed.
+ */
+void QualcommCameraHardware::stopRecording()
+{
+    ALOGV("stopRecording: E");
+    record_flag = 0;
+    Mutex::Autolock l(&mLock);
+    {
+        mRecordFrameLock.lock();
+        mReleasedRecordingFrame = true;
+        mRecordWait.signal();
+        mRecordFrameLock.unlock();
+
+        // Preview callbacks still depend on this stream; keep it alive.
+        if(mDataCallback && !(mCurrentTarget == TARGET_QSD8250) &&
+            (mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME)) {
+            ALOGV("stopRecording: X, preview still in progress");
+            return;
+        }
+    }
+    // Drop the live-snapshot JPEG mapping from the previous session, if any.
+    if (NULL != mJpegLiveSnapMapped) {
+        ALOGI("initLiveSnapshot: clearing old mJpegHeap.");
+        mJpegLiveSnapMapped->release(mJpegLiveSnapMapped);
+        mJpegLiveSnapMapped = NULL;
+    }
+
+    // If output2 enabled, exit video thread, invoke stop recording ioctl
+    if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+        /* when 3D mode is ON, don't exit the video thread, as
+         * we need to support the preview mode. Just set the recordingState
+         * to zero, so that there won't be any rcb callbacks. video thread
+         * will be terminated as part of stop preview.
+         */
+        if(mIs3DModeOn) {
+            ALOGV("%s: 3D mode on, so don't exit video thread", __FUNCTION__);
+            mRecordingState = 0;
+            return;
+        }
+
+        mVideoThreadWaitLock.lock();
+        mVideoThreadExit = 1;
+        mVideoThreadWaitLock.unlock();
+        native_stop_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+
+        // Wake the video thread so it can observe mVideoThreadExit.
+        pthread_mutex_lock(&(g_busy_frame_queue.mut));
+        pthread_cond_signal(&(g_busy_frame_queue.wait));
+        pthread_mutex_unlock(&(g_busy_frame_queue.mut));
+        // Free the encoder metadata packets built in startRecording().
+        for (int cnt = 0; cnt < kRecordBufferCount; cnt++) {
+            if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+                struct encoder_media_buffer_type * packet =
+                    (struct encoder_media_buffer_type *)metadata_memory[cnt]->data;
+                native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+                metadata_memory[cnt]->release(metadata_memory[cnt]);
+                metadata_memory[cnt] = NULL;
+            }
+        }
+    }
+    else if(mCurrentTarget == TARGET_MSM7627A) {
+        // 7x27A wraps preview buffers, so iterate the preview buffer count.
+        for (int cnt = 0; cnt < mTotalPreviewBufferCount; cnt++) {
+            if(mStoreMetaDataInFrame && (metadata_memory[cnt] != NULL)){
+                struct encoder_media_buffer_type * packet =
+                    (struct encoder_media_buffer_type *)metadata_memory[cnt]->data;
+                native_handle_delete(const_cast<native_handle_t *>(packet->meta_handle));
+                metadata_memory[cnt]->release(metadata_memory[cnt]);
+                metadata_memory[cnt] = NULL;
+            }
+        }
+    }
+#if 0
+    else  // for other targets where output2 is not enabled
+        stopPreviewInternal();
+    if (mJpegHeap != NULL) {
+        ALOGV("stopRecording: clearing old mJpegHeap.");
+        mJpegHeap.clear();
+    }
+#endif
+    mRecordingState = 0; // recording not started
+    ALOGV("stopRecording: X");
+}
+
+/*
+ * Called by the client/encoder when it is done with a video frame.
+ * Signals the recording wait condition and, on dual-VFE targets, finds the
+ * msm_frame matching 'opaque' (the metadata packet in metadata mode, the
+ * raw buffer pointer otherwise) and returns it to the free camframe queue.
+ */
+void QualcommCameraHardware::releaseRecordingFrame(const void *opaque)
+{
+    ALOGI("%s : BEGIN, opaque = 0x%p",__func__, opaque);
+    Mutex::Autolock rLock(&mRecordFrameLock);
+    mReleasedRecordingFrame = true;
+    mRecordWait.signal();
+
+    // Ff 7x30 : add the frame to the free camframe queue
+    if( (mCurrentTarget == TARGET_MSM7630 )  || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660)) {
+        ssize_t offset;
+        size_t size;
+        //sp<IMemoryHeap> heap = mem->getMemory(&offset, &size);
+        msm_frame* releaseframe = NULL;
+        int cnt;
+        for (cnt = 0; cnt < kRecordBufferCount; cnt++) {
+            if(mStoreMetaDataInFrame){
+                // In metadata mode 'opaque' is the metadata packet's data
+                // pointer, not the frame buffer itself.
+                if(metadata_memory[cnt] && metadata_memory[cnt]->data == opaque){
+                    ALOGV("in release recording frame(meta) found match , releasing buffer %d", (unsigned int)recordframes[cnt].buffer);
+                    releaseframe = &recordframes[cnt];
+                    break;
+                }
+            }else {
+                if(recordframes[cnt].buffer && ((unsigned long)opaque == recordframes[cnt].buffer) ){
+                    ALOGV("in release recording frame found match , releasing buffer %d", (unsigned int)recordframes[cnt].buffer);
+                    releaseframe = &recordframes[cnt];
+                    break;
+                }
+            }
+        }
+        if(cnt < kRecordBufferCount) {
+            // do this only if frame thread is running
+            mFrameThreadWaitLock.lock();
+            if(mFrameThreadRunning ) {
+                //Reset the track flag for this frame buffer
+                record_buffers_tracking_flag[cnt] = false;
+                LINK_camframe_add_frame(CAM_VIDEO_FRAME,releaseframe);
+            }
+
+            mFrameThreadWaitLock.unlock();
+        } else {
+            // Buffer not recognized -- dump the known buffers for debugging.
+            ALOGE("in release recordingframe XXXXX error , buffer not found");
+            for (int i=0; i< kRecordBufferCount; i++) {
+                ALOGE(" recordframes[%d].buffer = %d", i, (unsigned int)recordframes[i].buffer);
+            }
+        }
+    }
+
+    ALOGV("releaseRecordingFrame X");
+}
+
+// Recording is considered active only while the camera is running and the
+// client has registered a timestamped data callback with video frames
+// enabled in the message mask.
+bool QualcommCameraHardware::recordingEnabled()
+{
+    const bool wantsVideoFrames = (mMsgEnabled & CAMERA_MSG_VIDEO_FRAME) != 0;
+    return mCameraRunning && mDataCallbackTimestamp && wantsVideoFrames;
+}
+
+/*
+ * Issues the CAMERA_MSG_SHUTTER notification. Called twice per capture:
+ * first with mPlayShutterSoundOnly=true (ext2 != 0) so the service only
+ * plays the shutter sound, then later with false (ext2 == 0) so the
+ * service swaps preview buffers for the postview. mShutterLock and
+ * mShutterPending ensure the second notification fires at most once.
+ */
+void QualcommCameraHardware::notifyShutter(bool mPlayShutterSoundOnly)
+{
+    // NOTE(review): thumbnailHandle is computed but never used in this
+    // function, and mThumbnailBuffer is indexed as an array elsewhere in
+    // this file -- confirm this bare pointer test is intentional.
+    private_handle_t *thumbnailHandle;
+    if(mThumbnailBuffer) {
+        thumbnailHandle = (private_handle_t *) (*mThumbnailBuffer);
+    }
+    mShutterLock.lock();
+    //image_rect_type size;
+
+    if(mPlayShutterSoundOnly) {
+        /* At this point, invoke Notify Callback to play shutter sound only.
+         * We want to call notify callback again when we have the
+         * yuv picture ready. This is to reduce blanking at the time
+         * of displaying postview frame. Using ext2 to indicate whether
+         * to play shutter sound only or register the postview buffers.
+         */
+        mNotifyCallback(CAMERA_MSG_SHUTTER, 0, mPlayShutterSoundOnly,
+                        mCallbackCookie);
+        mShutterLock.unlock();
+        return;
+    }
+
+    if (mShutterPending && mNotifyCallback && (mMsgEnabled & CAMERA_MSG_SHUTTER)) {
+        //mDisplayHeap = mThumbnailHeap;
+#if 0
+        if (crop != NULL && (crop->in1_w != 0 && crop->in1_h != 0)) {
+            size.width = crop->in1_w;
+            size.height = crop->in1_h;
+        }
+        else {
+            size.width = mPostviewWidth;
+            size.height = mPostviewHeight;
+        }
+#endif
+/*
+        if(strTexturesOn == true) {
+            mDisplayHeap = mRawHeap;
+            size.width = mPictureWidth;
+            size.height = mPictureHeight;
+        }
+*/
+        /* Now, invoke Notify Callback to unregister preview buffer
+         * and register postview buffer with surface flinger. Set ext2
+         * as 0 to indicate not to play shutter sound.
+         */
+        mNotifyCallback(CAMERA_MSG_SHUTTER, 0, 0,
+                        mCallbackCookie);
+        mShutterPending = false;
+    }
+    mShutterLock.unlock();
+}
+
+// Lower-layer shutter hook: only triggers the shutter *sound* here; the
+// postview registration happens later via notifyShutter(FALSE). The crop
+// argument is not consumed at this point.
+static void receive_shutter_callback(common_crop_t *crop)
+{
+    ALOGV("receive_shutter_callback: E");
+    QualcommCameraHardware* hw = QualcommCameraHardware::getInstance();
+    if (hw != 0)
+        hw->notifyShutter(TRUE);   /* sound-only notification */
+    ALOGV("receive_shutter_callback: X");
+}
+
+// Crop the picture in place.
+/*
+ * Center-crops a YUV420 image from width x height down to
+ * cropped_width x cropped_height without a scratch buffer. Because the
+ * source and destination regions overlap, lines are copied with memmove:
+ * reverse order for the leading region where the destination offset is
+ * ahead of the source offset (see the 'position' computation), forward
+ * order for the rest. 'name' selects the snapshot-vs-thumbnail plane
+ * layout on 7x2x/7x30/8660 targets.
+ */
+static void crop_yuv420(uint32_t width, uint32_t height,
+                 uint32_t cropped_width, uint32_t cropped_height,
+                 uint8_t *image, const char *name)
+{
+    uint32_t i;
+    uint32_t x, y;
+    uint8_t* chroma_src, *chroma_dst;
+    int yOffsetSrc, yOffsetDst, CbCrOffsetSrc, CbCrOffsetDst;
+    int mSrcSize, mDstSize;
+
+    //check if all fields needed eg. size and also how to set y offset. If condition for 7x27
+    //and need to check if needed for 7x30.
+
+    // Ask the jpeg encoder for the plane offsets of both geometries.
+    LINK_jpeg_encoder_get_buffer_offset(width, height, (uint32_t *)&yOffsetSrc,
+                                       (uint32_t *)&CbCrOffsetSrc, (uint32_t *)&mSrcSize);
+
+    LINK_jpeg_encoder_get_buffer_offset(cropped_width, cropped_height, (uint32_t *)&yOffsetDst,
+                                       (uint32_t *)&CbCrOffsetDst, (uint32_t *)&mDstSize);
+
+    // Calculate the start position of the cropped area.
+    x = (width - cropped_width) / 2;
+    y = (height - cropped_height) / 2;
+    // Keep the crop origin even so chroma stays aligned with luma.
+    x &= ~1;
+    y &= ~1;
+
+    if((mCurrentTarget == TARGET_MSM7627)
+       || (mCurrentTarget == TARGET_MSM7625A)
+       || (mCurrentTarget == TARGET_MSM7627A)
+       || (mCurrentTarget == TARGET_MSM7630)
+       || (mCurrentTarget == TARGET_MSM8660)) {
+        if (!strcmp("snapshot camera", name)) {
+            chroma_src = image + CbCrOffsetSrc;
+            chroma_dst = image + CbCrOffsetDst;
+        } else {
+            // Thumbnail path: chroma immediately follows the luma plane.
+            chroma_src = image + width * height;
+            chroma_dst = image + cropped_width * cropped_height;
+            yOffsetSrc = 0;
+            yOffsetDst = 0;
+            CbCrOffsetSrc = width * height;
+            CbCrOffsetDst = cropped_width * cropped_height;
+        }
+    } else {
+        chroma_src = image + CbCrOffsetSrc;
+        chroma_dst = image + CbCrOffsetDst;
+    }
+
+    int32_t bufDst = yOffsetDst;
+    int32_t bufSrc = yOffsetSrc + (width * y) + x;
+
+    if( bufDst > bufSrc ){
+        ALOGV("crop yuv Y destination position follows source position");
+        /*
+         * If buffer destination follows buffer source, memcpy
+         * of lines will lead to overwriting subsequent lines. In order
+         * to prevent this, reverse copying of lines is performed
+         * for the set of lines where destination follows source and
+         * forward copying of lines is performed for lines where source
+         * follows destination. To calculate the position to switch,
+         * the initial difference between source and destination is taken
+         * and divided by difference between width and cropped width. For
+         * every line copied the difference between source destination
+         * drops by width - cropped width
+         */
+        //calculating inversion
+        int position = ( bufDst - bufSrc ) / (width - cropped_width);
+        // Copy luma component.
+        for(i=position+1; i < cropped_height; i++){
+            memmove(image + yOffsetDst + i * cropped_width,
+                    image + yOffsetSrc + width * (y + i) + x,
+                    cropped_width);
+        }
+        for(int j=position; j>=0; j--){
+            memmove(image + yOffsetDst + j * cropped_width,
+                    image + yOffsetSrc + width * (y + j) + x,
+                    cropped_width);
+        }
+    } else {
+        // Copy luma component.
+        for(i = 0; i < cropped_height; i++)
+            memcpy(image + yOffsetDst + i * cropped_width,
+                   image + yOffsetSrc + width * (y + i) + x,
+                   cropped_width);
+    }
+
+    // Copy chroma components.
+    // Chroma plane is half height in this 4:2:0 layout; same copy scheme.
+    cropped_height /= 2;
+    y /= 2;
+
+    bufDst = CbCrOffsetDst;
+    bufSrc = CbCrOffsetSrc + (width * y) + x;
+
+    if( bufDst > bufSrc ) {
+        ALOGV("crop yuv Chroma destination position follows source position");
+        /*
+         * Similar to y
+         */
+        int position = ( bufDst - bufSrc ) / (width - cropped_width);
+        for(i=position+1; i < cropped_height; i++){
+            memmove(chroma_dst + i * cropped_width,
+                    chroma_src + width * (y + i) + x,
+                    cropped_width);
+        }
+        for(int j=position; j >=0; j--){
+            memmove(chroma_dst + j * cropped_width,
+                    chroma_src + width * (y + j) + x,
+                    cropped_width);
+        }
+    } else {
+        for(i = 0; i < cropped_height; i++)
+            memcpy(chroma_dst + i * cropped_width,
+                   chroma_src + width * (y + i) + x,
+                   cropped_width);
+    }
+}
+// ReceiveRawPicture for ICS
+/*
+ * Handles a finished snapshot from the lower layer.
+ * On error: delivers a NULL CAMERA_MSG_COMPRESSED_IMAGE so the app can
+ * return to preview, then wakes the jpeg/snapshot waiters.
+ * On success: issues the second (silent) shutter notification, programs
+ * the preview-window crop from the frame's crop info, queues the postview
+ * buffer for display and forwards the raw image (JPEG path) or the raw
+ * snapshot mapping (raw path) to the client callbacks.
+ */
+void QualcommCameraHardware::receiveRawPicture(status_t status,struct msm_frame *postviewframe, struct msm_frame *mainframe)
+{
+    ALOGV("%s: E", __FUNCTION__);
+
+    void* cropp;
+    mSnapshotThreadWaitLock.lock();
+    if(mSnapshotThreadRunning == false) {
+        ALOGE("%s called in wrong state, ignore", __FUNCTION__);
+        /* Fix: release the lock before bailing out. The previous code
+         * returned with mSnapshotThreadWaitLock still held, deadlocking
+         * every subsequent snapshot attempt. */
+        mSnapshotThreadWaitLock.unlock();
+        return;
+    }
+    mSnapshotThreadWaitLock.unlock();
+
+    if(status != NO_ERROR){
+        ALOGE("%s: Failed to get Snapshot Image", __FUNCTION__);
+        if(mDataCallback &&
+            (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+            /* get picture failed. Give jpeg callback with NULL data
+             * to the application to restore to preview mode
+             */
+            ALOGE("get picture failed, giving jpeg callback with NULL data");
+            mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE, NULL, data_counter, NULL, mCallbackCookie);
+        }
+        mShutterLock.lock();
+        mShutterPending = false;
+        mShutterLock.unlock();
+        // Wake anyone blocked on jpeg completion or snapshot-mode exit.
+        mJpegThreadWaitLock.lock();
+        mJpegThreadRunning = false;
+        mJpegThreadWait.signal();
+        mJpegThreadWaitLock.unlock();
+        mInSnapshotModeWaitLock.lock();
+        mInSnapshotMode = false;
+        mInSnapshotModeWait.signal();
+        mInSnapshotModeWaitLock.unlock();
+        return;
+    }
+    /* call notifyShutter to config surface and overlay
+     * for postview rendering.
+     * Its necessary to issue another notifyShutter here with
+     * mPlayShutterSoundOnly as FALSE, since that is when the
+     * preview buffers are unregistered with the surface flinger.
+     * That is necessary otherwise the preview memory wont be
+     * deallocated.
+     */
+    cropp =postviewframe->cropinfo;
+    notifyShutter(FALSE);
+
+    if(mSnapshotFormat == PICTURE_FORMAT_JPEG) {
+        if(cropp != NULL){
+            common_crop_t *crop = (common_crop_t *)cropp;
+            if (crop->in1_w != 0 && crop->in1_h != 0) {
+                // Zoomed capture: center the crop rectangle, clamped to >= 0.
+                zoomCropInfo.left = (crop->out1_w - crop->in1_w + 1) / 2 - 1;
+                zoomCropInfo.top = (crop->out1_h - crop->in1_h + 1) / 2 - 1;
+                if(zoomCropInfo.left < 0) zoomCropInfo.left = 0;
+                if(zoomCropInfo.top < 0) zoomCropInfo.top = 0;
+                zoomCropInfo.right = zoomCropInfo.left + crop->in1_w;
+                zoomCropInfo.bottom = zoomCropInfo.top + crop->in1_h;
+                mPreviewWindow->set_crop(mPreviewWindow,
+                                zoomCropInfo.left,
+                                zoomCropInfo.top,
+                                zoomCropInfo.right,
+                                zoomCropInfo.bottom);
+                // Remember to restore the full-frame crop for preview.
+                mResetWindowCrop = true;
+            } else {
+                zoomCropInfo.left = 0;
+                zoomCropInfo.top = 0;
+                zoomCropInfo.right = mPostviewWidth;
+                zoomCropInfo.bottom = mPostviewHeight;
+                mPreviewWindow->set_crop(mPreviewWindow,
+                                zoomCropInfo.left,
+                                zoomCropInfo.top,
+                                zoomCropInfo.right,
+                                zoomCropInfo.bottom);
+            }
+        }
+        ALOGI("receiverawpicture : display lock");
+        mDisplayLock.lock();
+        int index = mapThumbnailBuffer(postviewframe);
+        ALOGI("receiveRawPicture : mapThumbnailBuffer returned %d", index);
+        private_handle_t *handle;
+        if(mThumbnailBuffer[index] != NULL && mZslEnable == false) {
+            handle = (private_handle_t *)(*mThumbnailBuffer[index]);
+            ALOGV("%s: Queueing postview buffer for display %d",
+                __FUNCTION__,handle->fd);
+            // The buffer must be genlock-unlocked before it can be queued
+            // back to the preview window.
+            if (BUFFER_LOCKED == mThumbnailLockState[index]) {
+                if (GENLOCK_FAILURE == genlock_unlock_buffer(handle)) {
+                    ALOGE("%s: genlock_unlock_buffer failed", __FUNCTION__);
+                    mDisplayLock.unlock();
+                    return;
+                } else {
+                    mThumbnailLockState[index] = BUFFER_UNLOCKED;
+                }
+            }
+            status_t retVal = mPreviewWindow->enqueue_buffer(mPreviewWindow,
+                                                        mThumbnailBuffer[index]);
+            ALOGI(" enQ thumbnailbuffer");
+            if( retVal != NO_ERROR) {
+                ALOGE("%s: Queuebuffer failed for postview buffer", __FUNCTION__);
+            }
+
+        }
+        mDisplayLock.unlock();
+        ALOGI("receiverawpicture : display unlock");
+        /* Give the main Image as raw to upper layers */
+        //Either CAMERA_MSG_RAW_IMAGE or CAMERA_MSG_RAW_IMAGE_NOTIFY will be set not both
+        if (mDataCallback && (mMsgEnabled & CAMERA_MSG_RAW_IMAGE))
+            mDataCallback(CAMERA_MSG_RAW_IMAGE, mRawMapped[index],data_counter,
+                          NULL, mCallbackCookie);
+        else if (mNotifyCallback && (mMsgEnabled & CAMERA_MSG_RAW_IMAGE_NOTIFY))
+            mNotifyCallback(CAMERA_MSG_RAW_IMAGE_NOTIFY, 0, 0,
+                            mCallbackCookie);
+
+        if(strTexturesOn == true) {
+            ALOGI("Raw Data given to app for processing...will wait for jpeg encode call");
+            mEncodePending = true;
+            mEncodePendingWaitLock.unlock();
+            mJpegThreadWaitLock.lock();
+            mJpegThreadWait.signal();
+            mJpegThreadWaitLock.unlock();
+        }
+    } else {  // Not Jpeg snapshot, it is Raw Snapshot , handle later
+        ALOGV("ReceiveRawPicture : raw snapshot not Jpeg, sending callback up");
+        if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE))
+            mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE,
+                          mRawSnapshotMapped,
+                          data_counter,
+                          NULL,
+                          mCallbackCookie);
+
+        // TEMP
+        ALOGI("receiveRawPicture : gave raw frame to app, giving signal");
+        mJpegThreadWaitLock.lock();
+        mJpegThreadRunning = false;
+        mJpegThreadWait.signal();
+        mJpegThreadWaitLock.unlock();
+
+    }
+    /* can start preview at this stage? early preview? */
+    mInSnapshotModeWaitLock.lock();
+    mInSnapshotMode = false;
+    mInSnapshotModeWait.signal();
+    mInSnapshotModeWaitLock.unlock();
+
+    ALOGV("%s: X", __FUNCTION__);
+
+}
+
+
+/*
+ * Delivers an encoded JPEG to the client. The encoded buffer is mapped via
+ * mapJpegBuffer(), copied into a fresh camera_memory_t and handed to the
+ * CAMERA_MSG_COMPRESSED_IMAGE callback. On any failure a NULL compressed
+ * image is delivered so the app can recover. The jpeg thread waiters are
+ * signalled once all requested captures have been received.
+ */
+void QualcommCameraHardware::receiveJpegPicture(status_t status, mm_camera_buffer_t *encoded_buffer)
+{
+    Mutex::Autolock cbLock(&mCallbackLock);
+    numJpegReceived++;
+    uint32_t offset ;
+    int32_t index = -1;
+    int32_t buffer_size = 0;
+    if(encoded_buffer && status == NO_ERROR) {
+        buffer_size = encoded_buffer->filled_size;
+        ALOGV("receiveJpegPicture: E buffer_size %d mJpegMaxSize = %d",buffer_size, mJpegMaxSize);
+
+        index = mapJpegBuffer(encoded_buffer);
+        ALOGI("receiveJpegPicutre : mapJpegBuffer index : %d", index);
+    }
+    if((index < 0) || (index >= (MAX_SNAPSHOT_BUFFERS-2))){
+        ALOGE("Jpeg index is not valid or fails. ");
+        if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+            mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE, NULL, data_counter, NULL, mCallbackCookie);
+        }
+        mJpegThreadWaitLock.lock();
+        mJpegThreadRunning = false;
+        mJpegThreadWait.signal();
+        mJpegThreadWaitLock.unlock();
+    } else {
+        ALOGV("receiveJpegPicture: Index of Jpeg is %d",index);
+
+        if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+            if(status == NO_ERROR) {
+                ALOGI("receiveJpegPicture : giving jpeg image callback to services");
+                mJpegCopyMapped = mGetMemory(-1, encoded_buffer->filled_size,1, mCallbackCookie);
+                if(!mJpegCopyMapped){
+                    /* Fix: the old code logged this failure and then fell
+                     * through to memcpy on the NULL mapping, crashing the
+                     * HAL. Deliver a NULL jpeg instead (same convention as
+                     * the invalid-index path above). */
+                    ALOGE("%s: mGetMemory failed.\n", __func__);
+                    mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE, NULL, data_counter, NULL, mCallbackCookie);
+                } else {
+                    memcpy(mJpegCopyMapped->data, mJpegMapped[index]->data, encoded_buffer->filled_size );
+                    mDataCallback(CAMERA_MSG_COMPRESSED_IMAGE,mJpegCopyMapped,data_counter,NULL,mCallbackCookie);
+                    // The client has consumed the copy; release it.
+                    mJpegCopyMapped->release(mJpegCopyMapped);
+                    mJpegCopyMapped = NULL;
+                }
+            }
+        } else {
+            ALOGI("JPEG callback was cancelled--not delivering image.");
+        }
+        if(numJpegReceived == numCapture){
+            mJpegThreadWaitLock.lock();
+            mJpegThreadRunning = false;
+            mJpegThreadWait.signal();
+            mJpegThreadWaitLock.unlock();
+        }
+    }
+
+    ALOGV("receiveJpegPicture: X callback done.");
+}
+/*
+ * Returns whether preview is currently running. Overlay / preview-window
+ * state is deliberately ignored (see the historical note below).
+ */
+bool QualcommCameraHardware::previewEnabled()
+{
+    /* If overlay is used the message CAMERA_MSG_PREVIEW_FRAME would
+     * be disabled at CameraService layer. Hence previewEnabled would
+     * return FALSE even though preview is running. Hence check for
+     * mOverlay not being NULL to ensure that previewEnabled returns
+     * accurate information.
+     */
+
+//    return mCameraRunning && mDataCallback &&
+//           ((mMsgEnabled & CAMERA_MSG_PREVIEW_FRAME) || (mOverlay != NULL));
+    // NOTE(review): "%x" with a pointer argument is a format mismatch on
+    // LP64 builds; should be "%p" if this log is kept.
+    ALOGI(" : mCameraRunning : %d mPreviewWindow = %x",mCameraRunning,mPreviewWindow);
+    return mCameraRunning;// || (mPreviewWindow != NULL);
+}
+/* Derive video (record) and preview dimensions from KEY_VIDEO_SIZE.
+ * When the app did not request a record size, video dims track the
+ * current preview dims.  Returns BAD_VALUE only when the record-size
+ * string fails to parse; otherwise NO_ERROR. */
+status_t QualcommCameraHardware::setRecordSize(const QCameraParameters& params)
+{
+    const char *recordSize = NULL;
+    recordSize = params.get(QCameraParameters::KEY_VIDEO_SIZE);
+    if(!recordSize) {
+        mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, "");
+        //If application didn't set this parameter string, use the values from
+        //getPreviewSize() as video dimensions.
+        ALOGV("No Record Size requested, use the preview dimensions");
+        videoWidth = previewWidth;
+        videoHeight = previewHeight;
+    } else {
+        //Extract the record witdh and height that application requested.
+        ALOGI("%s: requested record size %s", __FUNCTION__, recordSize);
+        if(!parse_size(recordSize, videoWidth, videoHeight)) {
+            mParameters.set(QCameraParameters::KEY_VIDEO_SIZE , recordSize);
+            //VFE output1 shouldn't be greater than VFE output2.
+            if( (previewWidth > videoWidth) || (previewHeight > videoHeight)) {
+                //Set preview sizes as record sizes.
+                ALOGI("Preview size %dx%d is greater than record size %dx%d,\
+                        resetting preview size to record size",previewWidth,\
+                        previewHeight, videoWidth, videoHeight);
+                previewWidth = videoWidth;
+                previewHeight = videoHeight;
+                mParameters.setPreviewSize(previewWidth, previewHeight);
+            }
+            // Single-VFE targets cannot run independent preview and video
+            // streams, so preview must mirror the video dimensions there.
+            if( (mCurrentTarget != TARGET_MSM7630)
+                    && (mCurrentTarget != TARGET_QSD8250)
+                    && (mCurrentTarget != TARGET_MSM8660) ) {
+                //For Single VFE output targets, use record dimensions as preview dimensions.
+                previewWidth = videoWidth;
+                previewHeight = videoHeight;
+                mParameters.setPreviewSize(previewWidth, previewHeight);
+            }
+            if(mIs3DModeOn == true) {
+                /* As preview and video frames are same in 3D mode,
+                 * preview size should be same as video size. This
+                 * cahnge is needed to take of video resolutions
+                 * like 720P and 1080p where the application can
+                 * request different preview sizes like 768x432
+                 */
+                previewWidth = videoWidth;
+                previewHeight = videoHeight;
+                mParameters.setPreviewSize(previewWidth, previewHeight);
+            }
+        } else {
+            mParameters.set(QCameraParameters::KEY_VIDEO_SIZE, "");
+            ALOGE("initPreview X: failed to parse parameter record-size (%s)", recordSize);
+            return BAD_VALUE;
+        }
+    }
+    ALOGI("%s: preview dimensions: %dx%d", __FUNCTION__, previewWidth, previewHeight);
+    ALOGI("%s: video dimensions: %dx%d", __FUNCTION__, videoWidth, videoHeight);
+    // Display dimensions follow whatever preview size the logic above chose.
+    mDimension.display_width = previewWidth;
+    mDimension.display_height= previewHeight;
+    return NO_ERROR;
+}
+
+/* Record the requested camera mode; value == 1 selects ZSL capture.
+ * NOTE(review): params.getInt() yields -1 when the key is absent and that
+ * value is stored as-is — confirm callers always supply KEY_CAMERA_MODE. */
+status_t QualcommCameraHardware::setCameraMode(const QCameraParameters& params) {
+    int32_t value = params.getInt(QCameraParameters::KEY_CAMERA_MODE);
+    mParameters.set(QCameraParameters::KEY_CAMERA_MODE,value);
+
+    ALOGI("ZSL is enabled %d", value);
+    // Switching ZSL state: block until the existing frame thread drains so
+    // buffers are not reconfigured underneath it.
+    if( value != mZslEnable) {
+        mFrameThreadWaitLock.lock();
+        while (mFrameThreadRunning) {
+            ALOGI("initPreview: waiting for old frame thread to complete.");
+            mFrameThreadWait.wait(mFrameThreadWaitLock);
+            ALOGI("initPreview: old frame thread completed.");
+        }
+        mFrameThreadWaitLock.unlock();
+    }
+    if(value == 1) {
+        mZslEnable = true;
+        /* mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+                QCameraParameters::FOCUS_MODE_INFINITY);
+           mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                QCameraParameters::FOCUS_MODE_INFINITY);*/
+    }else{
+        mZslEnable = false;
+        /*mParameters.set(QCameraParameters::KEY_SUPPORTED_FOCUS_MODES,
+                focus_mode_values);
+          mParameters.set(QCameraParameters::KEY_FOCUS_MODE,
+                QCameraParameters::FOCUS_MODE_AUTO);*/
+    }
+    return NO_ERROR;
+}
+
+/* Accept the requested preview size only when it matches an entry in the
+ * supported preview_sizes table; reject anything else with BAD_VALUE. */
+status_t QualcommCameraHardware::setPreviewSize(const QCameraParameters& params)
+{
+    int reqWidth, reqHeight;
+    params.getPreviewSize(&reqWidth, &reqHeight);
+    ALOGV("requested preview size %d x %d", reqWidth, reqHeight);
+
+    size_t idx = 0;
+    while (idx < PREVIEW_SIZE_COUNT) {
+        if (preview_sizes[idx].width == reqWidth
+                && preview_sizes[idx].height == reqHeight) {
+            mParameters.setPreviewSize(reqWidth, reqHeight);
+            mDimension.display_width = reqWidth;
+            mDimension.display_height= reqHeight;
+            return NO_ERROR;
+        }
+        ++idx;
+    }
+    ALOGE("Invalid preview size requested: %dx%d", reqWidth, reqHeight);
+    return BAD_VALUE;
+}
+/* Accept only fps ranges listed in the FpsRangesSupported table. */
+status_t QualcommCameraHardware::setPreviewFpsRange(const QCameraParameters& params)
+{
+    int reqMin, reqMax;
+    params.getPreviewFpsRange(&reqMin, &reqMax);
+    ALOGI("FPS Range Values: %dx%d", reqMin, reqMax);
+
+    for (size_t idx = 0; idx < FPS_RANGES_SUPPORTED_COUNT; ++idx) {
+        bool match = (FpsRangesSupported[idx].minFPS == reqMin)
+                && (FpsRangesSupported[idx].maxFPS == reqMax);
+        if (match) {
+            mParameters.setPreviewFpsRange(reqMin, reqMax);
+            return NO_ERROR;
+        }
+    }
+    return BAD_VALUE;
+}
+
+/* Apply the app-requested preview frame rate when the sensor supports it
+ * and the value lies in [MINIMUM_FPS, MAXIMUM_FPS]. */
+status_t QualcommCameraHardware::setPreviewFrameRate(const QCameraParameters& params)
+{
+    if( !mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS)){
+        ALOGI("Set fps is not supported for this sensor");
+        return NO_ERROR;
+    }
+    uint16_t previousFps = (uint16_t)mParameters.getPreviewFrameRate();
+    uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+    ALOGV("requested preview frame rate is %u", fps);
+
+    // Avoid a redundant driver round-trip when nothing changed after init.
+    if(mInitialized && (fps == previousFps)){
+        ALOGV("fps same as previous fps");
+        return NO_ERROR;
+    }
+
+    if(MINIMUM_FPS <= fps && fps <=MAXIMUM_FPS){
+        mParameters.setPreviewFrameRate(fps);
+        bool ret = native_set_parms(CAMERA_PARM_FPS,
+                sizeof(fps), (void *)&fps);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    return BAD_VALUE;
+}
+
+/* Select the preview frame-rate mode, then re-apply the fps value since
+ * switching modes resets it in the driver.  Returns BAD_VALUE for an
+ * unknown mode or out-of-range fps, UNKNOWN_ERROR on driver failure. */
+status_t QualcommCameraHardware::setPreviewFrameRateMode(const QCameraParameters& params) {
+    if( !mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS_MODE) && !mCfgControl.mm_camera_is_supported(CAMERA_PARM_FPS)){
+        ALOGI("set fps mode is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *previousMode = mParameters.getPreviewFrameRateMode();
+    const char *str = params.getPreviewFrameRateMode();
+    if( mInitialized && !strcmp(previousMode, str)) {
+        ALOGV("frame rate mode same as previous mode %s", previousMode);
+        return NO_ERROR;
+    }
+    int32_t frameRateMode = attr_lookup(frame_rate_modes, sizeof(frame_rate_modes) / sizeof(str_map),str);
+    if(frameRateMode != NOT_FOUND) {
+        ALOGV("setPreviewFrameRateMode: %s ", str);
+        mParameters.setPreviewFrameRateMode(str);
+        bool ret = native_set_parms(CAMERA_PARM_FPS_MODE, sizeof(frameRateMode), (void *)&frameRateMode);
+        // BUG FIX: the old "return ret;" converted bool false to status_t 0
+        // (NO_ERROR), silently reporting success when the driver rejected
+        // the mode.  Report the failure explicitly.
+        if(!ret) return UNKNOWN_ERROR;
+        //set the fps value when changing modes
+        // Use uint16_t (matching setPreviewFrameRate); the previous int16_t
+        // could wrap negative for large values and bypass the range check.
+        uint16_t fps = (uint16_t)params.getPreviewFrameRate();
+        if(MINIMUM_FPS <= fps && fps <=MAXIMUM_FPS){
+            mParameters.setPreviewFrameRate(fps);
+            ret = native_set_parms(CAMERA_PARM_FPS,
+                    sizeof(fps), (void *)&fps);
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+        ALOGE("Invalid preview frame rate value: %d", fps);
+        return BAD_VALUE;
+    }
+    ALOGE("Invalid preview frame rate mode value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/* Thumbnail dimensions must match an entry in jpeg_thumbnail_sizes. */
+status_t QualcommCameraHardware::setJpegThumbnailSize(const QCameraParameters& params){
+    int reqW = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH);
+    int reqH = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT);
+    ALOGV("requested jpeg thumbnail size %d x %d", reqW, reqH);
+
+    // Linear scan of the supported-thumbnail table.
+    for (unsigned int idx = 0; idx < JPEG_THUMBNAIL_SIZE_COUNT; ++idx) {
+        if (jpeg_thumbnail_sizes[idx].width == reqW
+                && jpeg_thumbnail_sizes[idx].height == reqH) {
+            mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_WIDTH, reqW);
+            mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_HEIGHT, reqH);
+            return NO_ERROR;
+        }
+    }
+    return BAD_VALUE;
+}
+
+/* If the requested picture is smaller than the preview, substitute a
+ * larger dimension for the actual encode and remember the app's size in
+ * mActualPictWidth/Height so the JPEG can be downscaled afterwards.
+ * Returns true when width/height were modified in place. */
+bool QualcommCameraHardware::updatePictureDimension(const QCameraParameters& params, int& width, int& height)
+{
+    bool retval = false;
+    int previewWidth, previewHeight;
+    params.getPreviewSize(&previewWidth, &previewHeight);
+    ALOGV("updatePictureDimension: %dx%d <- %dx%d", width, height,
+            previewWidth, previewHeight);
+    if ((width < previewWidth) && (height < previewHeight)) {
+        /*As we donot support jpeg downscaling for picture dimension < previewdimesnion/8 ,
+          Adding support for the same for cts testcases*/
+        mActualPictWidth = width;
+        mActualPictHeight = height;
+        if((previewWidth /8) > width ) {
+            // Scale the tiny request up until preview/picture ratio < 8;
+            // i ends at the first shift where (ratio >> i) < 8.
+            int ratio = previewWidth/width;
+            int i;
+            for(i =0 ; i < ratio ; i++) {
+                if((ratio >> i) < 8)
+                    break;
+            }
+            width = width *i*2;
+            height = height *i*2;
+        }
+        else {
+            // Moderate mismatch: just encode at the preview size.
+            width = previewWidth;
+            height = previewHeight;
+        }
+        mUseJpegDownScaling = true;
+        retval = true;
+    } else
+        mUseJpegDownScaling = false;
+    return retval;
+}
+
+/* Accept a picture size from the supported table, or any free-form
+ * dimension that passes isValidDimension(); otherwise reject. */
+status_t QualcommCameraHardware::setPictureSize(const QCameraParameters& params)
+{
+    int reqWidth, reqHeight;
+    params.getPictureSize(&reqWidth, &reqHeight);
+    ALOGV("requested picture size %d x %d", reqWidth, reqHeight);
+
+    bool listed = false;
+    for (int idx = 0; idx < supportedPictureSizesCount; ++idx) {
+        if (picture_sizes_ptr[idx].width == reqWidth
+                && picture_sizes_ptr[idx].height == reqHeight) {
+            listed = true;
+            break;
+        }
+    }
+    /* Dimension not among the ones in the list may still be a valid
+     * free-form dimension; configure the camera for it if so. */
+    if (listed || isValidDimension(reqWidth, reqHeight)) {
+        mParameters.setPictureSize(reqWidth, reqHeight);
+        mDimension.picture_width = reqWidth;
+        mDimension.picture_height = reqHeight;
+        return NO_ERROR;
+    }
+    ALOGE("Invalid picture size requested: %dx%d", reqWidth, reqHeight);
+    return BAD_VALUE;
+}
+
+/* Validate and store main-image and thumbnail JPEG quality (0..100).
+ * Returns BAD_VALUE if either value is out of range; both are checked. */
+status_t QualcommCameraHardware::setJpegQuality(const QCameraParameters& params) {
+    status_t result = NO_ERROR;
+
+    int mainQuality = params.getInt(QCameraParameters::KEY_JPEG_QUALITY);
+    if (mainQuality < 0 || mainQuality > 100) {
+        ALOGE("Invalid jpeg quality=%d", mainQuality);
+        result = BAD_VALUE;
+    } else {
+        mParameters.set(QCameraParameters::KEY_JPEG_QUALITY, mainQuality);
+    }
+
+    int thumbQuality = params.getInt(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY);
+    if (thumbQuality < 0 || thumbQuality > 100) {
+        ALOGE("Invalid jpeg thumbnail quality=%d", thumbQuality);
+        result = BAD_VALUE;
+    } else {
+        mParameters.set(QCameraParameters::KEY_JPEG_THUMBNAIL_QUALITY, thumbQuality);
+    }
+    return result;
+}
+
+/* Apply the color effect (mono, sepia, ...) when the sensor supports it.
+ * 'result' is an out-parameter — presumably written by native_set_parms;
+ * NOTE(review): it is read even though it is never initialized here —
+ * confirm native_set_parms always sets it on the 4-arg overload. */
+status_t QualcommCameraHardware::setEffect(const QCameraParameters& params)
+{
+    const char *str = params.get(QCameraParameters::KEY_EFFECT);
+    int result;
+
+    if (str != NULL) {
+        int32_t value = attr_lookup(effects, sizeof(effects) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            // Unsupported-on-this-sensor is not an error for the caller.
+            if( !mCfgControl.mm_camera_is_parm_supported(CAMERA_PARM_EFFECT, (void *) &value)){
+                ALOGI("Camera Effect - %s mode is not supported for this sensor",str);
+                return NO_ERROR;
+            }else {
+                mParameters.set(QCameraParameters::KEY_EFFECT, str);
+                bool ret = native_set_parms(CAMERA_PARM_EFFECT, sizeof(value),
+                        (void *)&value,(int *)&result);
+                if(result == MM_CAMERA_ERR_INVALID_OPERATION) {
+                    ALOGI("Camera Effect: %s is not set as the selected value is not supported ", str);
+                }
+                return ret ? NO_ERROR : UNKNOWN_ERROR;
+            }
+        }
+    }
+    ALOGE("Invalid effect value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/* Cache the recording hint and forward it to the driver.  An absent key
+ * is a no-op; an unrecognized value is rejected with BAD_VALUE. */
+status_t QualcommCameraHardware::setRecordingHint(const QCameraParameters& params)
+{
+
+    const char * str = params.get(QCameraParameters::KEY_RECORDING_HINT);
+
+    if(str != NULL){
+        int32_t value = attr_lookup(recording_Hints,
+                sizeof(recording_Hints) / sizeof(str_map), str);
+        if(value != NOT_FOUND){
+
+            native_set_parms(CAMERA_PARM_RECORDING_HINT, sizeof(value),
+                    (void *)&value);
+            /*native_set_parms(CAMERA_PARM_CAF_ENABLE, sizeof(value),
+                    (void *)&value);*/
+            mParameters.set(QCameraParameters::KEY_RECORDING_HINT, str);
+        } else {
+            // BUG FIX: the log message said "Invalid Picture Format value"
+            // (copy/paste from the picture-format setter); this path rejects
+            // a bad recording-hint value.
+            ALOGE("Invalid recording hint value: %s", str);
+            return BAD_VALUE;
+        }
+    }
+    return NO_ERROR;
+}
+
+/* Apply exposure compensation.  The numerator from the app is packed with
+ * the fixed denominator into one 32-bit value (numerator in the high 16
+ * bits) before being handed to the driver. */
+status_t QualcommCameraHardware::setExposureCompensation(
+        const QCameraParameters & params){
+    ALOGV("DEBBUG: %s E",__FUNCTION__);
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_EXPOSURE_COMPENSATION)) {
+        ALOGI("Exposure Compensation is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    int numerator = params.getInt(QCameraParameters::KEY_EXPOSURE_COMPENSATION);
+    if(EXPOSURE_COMPENSATION_MINIMUM_NUMERATOR <= numerator &&
+            numerator <= EXPOSURE_COMPENSATION_MAXIMUM_NUMERATOR){
+        // Pack (numerator << 16) | denominator into a single parameter word.
+        int16_t numerator16 = (int16_t)(numerator & 0x0000ffff);
+        uint16_t denominator16 = EXPOSURE_COMPENSATION_DENOMINATOR;
+        uint32_t value = 0;
+        value = numerator16 << 16 | denominator16;
+
+        mParameters.set(QCameraParameters::KEY_EXPOSURE_COMPENSATION,
+                numerator);
+        bool ret = native_set_parms(CAMERA_PARM_EXPOSURE_COMPENSATION,
+                sizeof(value), (void *)&value);
+        ALOGI("DEBBUG: %s ret = %d X",__FUNCTION__, ret);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    }
+    ALOGE("Invalid Exposure Compensation");
+    return BAD_VALUE;
+}
+
+/* Map the auto-exposure string onto its driver enum and push it down. */
+status_t QualcommCameraHardware::setAutoExposure(const QCameraParameters& params)
+{
+    if (!mCfgControl.mm_camera_is_supported(CAMERA_PARM_EXPOSURE)) {
+        ALOGI("Auto Exposure not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *aeStr = params.get(QCameraParameters::KEY_AUTO_EXPOSURE);
+    int32_t aeValue = (aeStr == NULL) ? NOT_FOUND :
+            attr_lookup(autoexposure, sizeof(autoexposure) / sizeof(str_map), aeStr);
+    if (aeValue == NOT_FOUND) {
+        ALOGE("Invalid auto exposure value: %s", (aeStr == NULL) ? "NULL" : aeStr);
+        return BAD_VALUE;
+    }
+    mParameters.set(QCameraParameters::KEY_AUTO_EXPOSURE, aeStr);
+    bool applied = native_set_parms(CAMERA_PARM_EXPOSURE, sizeof(aeValue),
+            (void *)&aeValue);
+    return applied ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+/* Store and apply the sharpness level.
+ * NOTE(review): out-of-range input returns UNKNOWN_ERROR here while the
+ * sibling setters return BAD_VALUE for invalid input — confirm callers
+ * do not distinguish the two before normalizing. */
+status_t QualcommCameraHardware::setSharpness(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_SHARPNESS)) {
+        ALOGI("Sharpness not supported for this sensor");
+        return NO_ERROR;
+    }
+    int sharpness = params.getInt(QCameraParameters::KEY_SHARPNESS);
+    if((sharpness < CAMERA_MIN_SHARPNESS
+            || sharpness > CAMERA_MAX_SHARPNESS))
+        return UNKNOWN_ERROR;
+
+    ALOGV("setting sharpness %d", sharpness);
+    mParameters.set(QCameraParameters::KEY_SHARPNESS, sharpness);
+    bool ret = native_set_parms(CAMERA_PARM_SHARPNESS, sizeof(sharpness),
+            (void *)&sharpness);
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+/* Contrast is only user-controllable when the scene mode is
+ * CAMERA_BESTSHOT_OFF; any active scene mode owns the contrast setting.
+ * NOTE(review): KEY_SCENE_MODE is passed to attr_lookup without a NULL
+ * check — presumably the key always has a default value; verify. */
+status_t QualcommCameraHardware::setContrast(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_CONTRAST)) {
+        ALOGI("Contrast not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+    int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+
+    if(value == CAMERA_BESTSHOT_OFF) {
+        int contrast = params.getInt(QCameraParameters::KEY_CONTRAST);
+        if((contrast < CAMERA_MIN_CONTRAST)
+                || (contrast > CAMERA_MAX_CONTRAST))
+            return UNKNOWN_ERROR;
+
+        ALOGV("setting contrast %d", contrast);
+        mParameters.set(QCameraParameters::KEY_CONTRAST, contrast);
+        bool ret = native_set_parms(CAMERA_PARM_CONTRAST, sizeof(contrast),
+                (void *)&contrast);
+        return ret ? NO_ERROR : UNKNOWN_ERROR;
+    } else {
+        // A scene mode is active: silently keep its contrast choice.
+        ALOGI(" Contrast value will not be set " \
+                "when the scenemode selected is %s", str);
+        return NO_ERROR;
+    }
+}
+
+/* Store and apply the saturation level.
+ * 'result' is an out-parameter — presumably filled by native_set_parms;
+ * NOTE(review): it is read without initialization — confirm the 4-arg
+ * native_set_parms overload always writes it. */
+status_t QualcommCameraHardware::setSaturation(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_SATURATION)) {
+        ALOGI("Saturation not supported for this sensor");
+        return NO_ERROR;
+    }
+    int result;
+    int saturation = params.getInt(QCameraParameters::KEY_SATURATION);
+
+    if((saturation < CAMERA_MIN_SATURATION)
+            || (saturation > CAMERA_MAX_SATURATION))
+        return UNKNOWN_ERROR;
+
+    ALOGV("Setting saturation %d", saturation);
+    mParameters.set(QCameraParameters::KEY_SATURATION, saturation);
+    bool ret = native_set_parms(CAMERA_PARM_SATURATION, sizeof(saturation),
+            (void *)&saturation, (int *)&result);
+    if(result == MM_CAMERA_ERR_INVALID_OPERATION)
+        ALOGI("Saturation Value: %d is not set as the selected value is not supported", saturation);
+
+    return ret ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+/* Validate and store the preview pixel format.  In 3D camera mode the
+ * native call is skipped (the driver fixes the format itself). */
+status_t QualcommCameraHardware::setPreviewFormat(const QCameraParameters& params) {
+    const char *str = params.getPreviewFormat();
+    int32_t previewFormat = attr_lookup(preview_formats, sizeof(preview_formats) / sizeof(str_map), str);
+    if(previewFormat != NOT_FOUND) {
+        mParameters.set(QCameraParameters::KEY_PREVIEW_FORMAT, str);
+        mPreviewFormat = previewFormat;
+        if(HAL_currentCameraMode != CAMERA_MODE_3D) {
+            ALOGI("Setting preview format to native");
+            // The return value was previously stored in an unused local
+            // ('bool ret'); failure was never propagated, so drop the local.
+            native_set_parms(CAMERA_PARM_PREVIEW_FORMAT, sizeof(previewFormat),
+                    (void *)&previewFormat);
+        }else{
+            ALOGI("Skipping set preview format call to native");
+        }
+        return NO_ERROR;
+    }
+    ALOGE("Invalid preview format value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/* Mirror the "strtextures" flag into mParameters and cache its on/off
+ * state in strTexturesOn; unrecognized values leave the flag untouched. */
+status_t QualcommCameraHardware::setStrTextures(const QCameraParameters& params) {
+    const char *value = params.get("strtextures");
+    if (value == NULL)
+        return NO_ERROR;
+    ALOGV("strtextures = %s", value);
+    mParameters.set("strtextures", value);
+    if (strncmp(value, "on", 2) == 0 || strncmp(value, "ON", 2) == 0)
+        strTexturesOn = true;
+    else if (strncmp(value, "off", 3) == 0 || strncmp(value, "OFF", 3) == 0)
+        strTexturesOn = false;
+    return NO_ERROR;
+}
+
+/* Forward a changed "luma-adaptation" (brightness) value to the driver;
+ * an unchanged value is a no-op. */
+status_t QualcommCameraHardware::setBrightness(const QCameraParameters& params) {
+    if (!mCfgControl.mm_camera_is_supported(CAMERA_PARM_BRIGHTNESS)) {
+        ALOGI("Set Brightness not supported for this sensor");
+        return NO_ERROR;
+    }
+    int newBrightness = params.getInt("luma-adaptation");
+    if (mBrightness == newBrightness)
+        return NO_ERROR;
+    ALOGV(" new brightness value : %d ", newBrightness);
+    mBrightness = newBrightness;
+    mParameters.set("luma-adaptation", newBrightness);
+    bool applied = native_set_parms(CAMERA_PARM_BRIGHTNESS, sizeof(mBrightness),
+            (void *)&mBrightness);
+    return applied ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+/* Forward a changed skin-tone enhancement factor to the driver; an
+ * unchanged value is a no-op. */
+status_t QualcommCameraHardware::setSkinToneEnhancement(const QCameraParameters& params) {
+    if (!mCfgControl.mm_camera_is_supported(CAMERA_PARM_SCE_FACTOR)) {
+        ALOGI("SkinToneEnhancement not supported for this sensor");
+        return NO_ERROR;
+    }
+    int newSceValue = params.getInt("skinToneEnhancement");
+    if (mSkinToneEnhancement == newSceValue)
+        return NO_ERROR;
+    ALOGV(" new skinTone correction value : %d ", newSceValue);
+    mSkinToneEnhancement = newSceValue;
+    mParameters.set("skinToneEnhancement", newSceValue);
+    bool applied = native_set_parms(CAMERA_PARM_SCE_FACTOR, sizeof(mSkinToneEnhancement),
+            (void *)&mSkinToneEnhancement);
+    return applied ? NO_ERROR : UNKNOWN_ERROR;
+}
+
+/* Apply the white-balance preset.  'result' is an out-parameter —
+ * presumably filled by native_set_parms; when the driver reports
+ * MM_CAMERA_ERR_INVALID_OPERATION the value is logged as skipped. */
+status_t QualcommCameraHardware::setWhiteBalance(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_WHITE_BALANCE)) {
+        ALOGI("WhiteBalance not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    int result;
+
+    const char *str = params.get(QCameraParameters::KEY_WHITE_BALANCE);
+    if (str != NULL) {
+        int32_t value = attr_lookup(whitebalance, sizeof(whitebalance) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_WHITE_BALANCE, str);
+            bool ret = native_set_parms(CAMERA_PARM_WHITE_BALANCE, sizeof(value),
+                    (void *)&value, (int *)&result);
+            if(result == MM_CAMERA_ERR_INVALID_OPERATION) {
+                ALOGI("WhiteBalance Value: %s is not set as the selected value is not supported ", str);
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid whitebalance value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+
+}
+
+/* Apply the flash (LED) mode.  When ZSL is enabled and flash is on,
+ * burst capture is forced down to a single snapshot per shutter. */
+status_t QualcommCameraHardware::setFlash(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_LED_MODE)) {
+        ALOGI("%s: flash not supported", __FUNCTION__);
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_FLASH_MODE);
+    if (str != NULL) {
+        int32_t value = attr_lookup(flash, sizeof(flash) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            mParameters.set(QCameraParameters::KEY_FLASH_MODE, str);
+            bool ret = native_set_parms(CAMERA_PARM_LED_MODE,
+                    sizeof(value), (void *)&value);
+            // Flash + ZSL burst is not supported: clamp to one snapshot.
+            if(mZslEnable && (value != LED_MODE_OFF)){
+                mParameters.set("num-snaps-per-shutter", "1");
+                ALOGI("%s Setting num-snaps-per-shutter to 1", __FUNCTION__);
+                numCapture = 1;
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid flash mode value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/* Apply the 50/60 Hz antibanding mode.  'result' is an out-parameter —
+ * presumably filled by native_set_parms — used only to log when the
+ * current BestShot mode rejects the value. */
+status_t QualcommCameraHardware::setAntibanding(const QCameraParameters& params)
+{
+    int result;
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_ANTIBANDING)) {
+        ALOGI("Parameter AntiBanding is not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_ANTIBANDING);
+    if (str != NULL) {
+        int value = (camera_antibanding_type)attr_lookup(
+                antibanding, sizeof(antibanding) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            camera_antibanding_type temp = (camera_antibanding_type) value;
+            mParameters.set(QCameraParameters::KEY_ANTIBANDING, str);
+            bool ret = native_set_parms(CAMERA_PARM_ANTIBANDING,
+                    sizeof(camera_antibanding_type), (void *)&temp ,(int *)&result);
+            if(result == MM_CAMERA_ERR_INVALID_OPERATION) {
+                ALOGI("AntiBanding Value: %s is not supported for the given BestShot Mode", str);
+            }
+            return ret ? NO_ERROR : UNKNOWN_ERROR;
+        }
+    }
+    ALOGE("Invalid antibanding value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/* Memory Color Enhancement: translate the string setting to its driver
+ * enum and apply it.  The native call's result is intentionally ignored,
+ * matching the driver's fire-and-forget contract for this parameter. */
+status_t QualcommCameraHardware::setMCEValue(const QCameraParameters& params)
+{
+    if (!mCfgControl.mm_camera_is_supported(CAMERA_PARM_MCE)) {
+        ALOGI("Parameter MCE is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *mceStr = params.get(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT);
+    if (mceStr != NULL) {
+        int mceValue = attr_lookup(mce, sizeof(mce) / sizeof(str_map), mceStr);
+        if (mceValue != NOT_FOUND) {
+            int8_t mceParm = (int8_t)mceValue;
+            ALOGI("%s: setting MCE value of %s", __FUNCTION__, mceStr);
+            mParameters.set(QCameraParameters::KEY_MEMORY_COLOR_ENHANCEMENT, mceStr);
+
+            native_set_parms(CAMERA_PARM_MCE, sizeof(int8_t), (void *)&mceParm);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid MCE value: %s", (mceStr == NULL) ? "NULL" : mceStr);
+    return BAD_VALUE;
+}
+
+/* Apply the high-frame-rate (HFR) video mode.  A change while preview is
+ * running spawns a detached hfr_thread to restart the pipeline instead of
+ * programming the driver directly.
+ * NOTE(review): oldHfr from mParameters.get() is passed to strcmp without
+ * a NULL check — presumably the key always has a default; verify. */
+status_t QualcommCameraHardware::setHighFrameRate(const QCameraParameters& params)
+{
+    if((!mCfgControl.mm_camera_is_supported(CAMERA_PARM_HFR)) || (mIs3DModeOn)) {
+        ALOGI("Parameter HFR is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+    if (str != NULL) {
+        int value = attr_lookup(hfr, sizeof(hfr) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int32_t temp = (int32_t)value;
+            ALOGI("%s: setting HFR value of %s(%d)", __FUNCTION__, str, temp);
+            //Check for change in HFR value
+            const char *oldHfr = mParameters.get(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE);
+            if(strcmp(oldHfr, str)){
+                ALOGI("%s: old HFR: %s, new HFR %s", __FUNCTION__, oldHfr, str);
+                mParameters.set(QCameraParameters::KEY_VIDEO_HIGH_FRAME_RATE, str);
+                mHFRMode = true;
+                if(mCameraRunning == true) {
+                    // Preview active: hand the restart to a detached thread
+                    // and return; the thread applies the new mode.
+                    mHFRThreadWaitLock.lock();
+                    pthread_attr_t pattr;
+                    pthread_attr_init(&pattr);
+                    pthread_attr_setdetachstate(&pattr, PTHREAD_CREATE_DETACHED);
+                    mHFRThreadRunning = !pthread_create(&mHFRThread,
+                            &pattr,
+                            hfr_thread,
+                            (void*)NULL);
+                    mHFRThreadWaitLock.unlock();
+                    return NO_ERROR;
+                }
+            }
+            native_set_parms(CAMERA_PARM_HFR, sizeof(int32_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid HFR value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/* Enable/disable HDR capture.  HDR uses exposure bracketing under the
+ * hood (HDR_MODE with 3 driver frames); enabling it raises numCapture to
+ * the number of HAL frames so all bracketed shots are collected. */
+status_t QualcommCameraHardware::setHDRImaging(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_HDR) && mZslEnable) {
+        ALOGI("Parameter HDR is not supported for this sensor/ ZSL mode");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_HIGH_DYNAMIC_RANGE_IMAGING);
+    if (str != NULL) {
+        int value = attr_lookup(hdr, sizeof(hdr) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            exp_bracketing_t temp;
+            memset(&temp, 0, sizeof(temp));
+            temp.hdr_enable= (int32_t)value;
+            temp.mode = HDR_MODE;
+            temp.total_frames = 3;
+            temp.total_hal_frames = HDR_HAL_FRAME;
+            mHdrMode = temp.hdr_enable;
+            ALOGI("%s: setting HDR value of %s", __FUNCTION__, str);
+            mParameters.set(QCameraParameters::KEY_HIGH_DYNAMIC_RANGE_IMAGING, str);
+            if(mHdrMode){
+                numCapture = temp.total_hal_frames;
+            } else
+                numCapture = 1;
+            native_set_parms(CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid HDR value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+/* Program manual exposure bracketing from "capture-burst-exposures"
+ * (comma-separated EV values).  Skipped while HDR mode is active.
+ * NOTE(review): total_frames is derived from strlen(exp_val), not from
+ * the number of comma-separated tokens the inline comment describes —
+ * confirm the driver expects the string length here. */
+status_t QualcommCameraHardware::setExpBracketing(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_HDR) && mZslEnable) {
+        ALOGI("Parameter Exposure Bracketing is not supported for this sensor/ZSL mode");
+        return NO_ERROR;
+    }
+    const char *str = params.get("capture-burst-exposures");
+    if ((str != NULL) && (!mHdrMode)) {
+        char exp_val[MAX_EXP_BRACKETING_LENGTH];
+        exp_bracketing_t temp;
+        memset(&temp, 0, sizeof(temp));
+
+        mExpBracketMode = true;
+        temp.mode = EXP_BRACKETING_MODE;
+        temp.hdr_enable = true;
+        /* App sets values separated by comma.
+           Thus total number of snapshot to capture is strlen(str)/2
+           eg: "-1,1,2" */
+        strlcpy(exp_val, str, sizeof(exp_val));
+        temp.total_frames = (strlen(exp_val) >  MAX_SNAPSHOT_BUFFERS -2) ?
+                MAX_SNAPSHOT_BUFFERS -2 : strlen(exp_val);
+        temp.total_hal_frames = temp.total_frames;
+        strlcpy(temp.values, exp_val, MAX_EXP_BRACKETING_LENGTH);
+        ALOGI("%s: setting Exposure Bracketing value of %s", __FUNCTION__, temp.values);
+        mParameters.set("capture-burst-exposures", str);
+        // In non-ZSL mode the snapshot count follows the bracket count.
+        if(!mZslEnable){
+            numCapture = temp.total_frames;
+        }
+        native_set_parms(CAMERA_PARM_HDR, sizeof(exp_bracketing_t), (void *)&temp);
+        return NO_ERROR;
+    } else
+        mExpBracketMode = false;
+    return NO_ERROR;
+}
+
+/* Enable/disable lens-shading (rolloff) correction.
+ * NOTE(review): an unrecognized value logs an error yet still returns
+ * NO_ERROR, unlike the sibling setters which return BAD_VALUE — confirm
+ * this lenient behavior is intentional. */
+status_t QualcommCameraHardware::setLensshadeValue(const QCameraParameters& params)
+{
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_ROLLOFF)) {
+        ALOGI("Parameter Rolloff is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *str = params.get(QCameraParameters::KEY_LENSSHADE);
+    if (str != NULL) {
+        int value = attr_lookup(lensshade,
+                sizeof(lensshade) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            int8_t temp = (int8_t)value;
+            mParameters.set(QCameraParameters::KEY_LENSSHADE, str);
+
+            native_set_parms(CAMERA_PARM_ROLLOFF, sizeof(int8_t), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid lensShade value: %s", (str == NULL) ? "NULL" : str);
+    return NO_ERROR;
+}
+
+/* Apply the selectable-zone autofocus rectangle selection.  A no-op
+ * (NO_ERROR) when autofocus or selectable-zone AF is unsupported. */
+status_t QualcommCameraHardware::setSelectableZoneAf(const QCameraParameters& params)
+{
+    if(mHasAutoFocusSupport && supportsSelectableZoneAf()) {
+        const char *str = params.get(QCameraParameters::KEY_SELECTABLE_ZONE_AF);
+        if (str != NULL) {
+            int32_t value = attr_lookup(selectable_zone_af, sizeof(selectable_zone_af) / sizeof(str_map), str);
+            if (value != NOT_FOUND) {
+                mParameters.set(QCameraParameters::KEY_SELECTABLE_ZONE_AF, str);
+                bool ret = native_set_parms(CAMERA_PARM_FOCUS_RECT, sizeof(value),
+                        (void *)&value);
+                return ret ? NO_ERROR : UNKNOWN_ERROR;
+            }
+        }
+        ALOGE("Invalid selectable zone af value: %s", (str == NULL) ? "NULL" : str);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/* Configure touch-to-focus / touch-AEC from the metering-area center.
+ * Upper-layer coordinates in (-1000..1000) are mapped to preview pixel
+ * space; the same point drives both AEC (center) and AF (top-left of a
+ * FOCUS_RECTANGLE_DX x DY box).  Negative coordinates are ignored. */
+status_t QualcommCameraHardware::setTouchAfAec(const QCameraParameters& params)
+{
+    ALOGV("%s",__func__);
+    if(mHasAutoFocusSupport){
+        int xAec, yAec, xAf, yAf;
+        int cx, cy;
+        int width, height;
+        params.getMeteringAreaCenter(&cx, &cy);
+        mParameters.getPreviewSize(&width, &height);
+
+        // @Punit
+        // The coords sent from upper layer is in range (-1000, -1000) to (1000, 1000)
+        // So, they are transformed to range (0, 0) to (previewWidth, previewHeight)
+        cx = cx + 1000;
+        cy = cy + 1000;
+        cx = cx * (width / 2000.0f);
+        cy = cy * (height / 2000.0f);
+
+        //Negative values are invalid and does not update anything
+        ALOGV("Touch Area Center (cx, cy) = (%d, %d)", cx, cy);
+
+        //Currently using same values for AF and AEC
+        xAec = cx; yAec = cy;
+        xAf = cx; yAf = cy;
+
+        const char *str = params.get(QCameraParameters::KEY_TOUCH_AF_AEC);
+        if (str != NULL) {
+            int value = attr_lookup(touchafaec,
+                    sizeof(touchafaec) / sizeof(str_map), str);
+            if (value != NOT_FOUND) {
+
+                //Dx,Dy will be same as defined in res/layout/camera.xml
+                //passed down to HAL in a key.value pair.
+                int FOCUS_RECTANGLE_DX = params.getInt("touchAfAec-dx");
+                int FOCUS_RECTANGLE_DY = params.getInt("touchAfAec-dy");
+                mParameters.set(QCameraParameters::KEY_TOUCH_AF_AEC, str);
+                mParameters.setTouchIndexAec(xAec, yAec);
+                mParameters.setTouchIndexAf(xAf, yAf);
+
+                cam_set_aec_roi_t aec_roi_value;
+                roi_info_t af_roi_value;
+
+                memset(&af_roi_value, 0, sizeof(roi_info_t));
+
+                //If touch AF/AEC is enabled and touch event has occured then
+                //call the ioctl with valid values.
+                if (value == true
+                        && (xAec >= 0 && yAec >= 0)
+                        && (xAf >= 0 && yAf >= 0)) {
+                    //Set Touch AEC params (Pass the center co-ordinate)
+                    aec_roi_value.aec_roi_enable = AEC_ROI_ON;
+                    aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+                    aec_roi_value.aec_roi_position.coordinate.x = xAec;
+                    aec_roi_value.aec_roi_position.coordinate.y = yAec;
+
+                    //Set Touch AF params (Pass the top left co-ordinate)
+                    af_roi_value.num_roi = 1;
+                    // Clamp the AF box so it never starts off-screen.
+                    if ((xAf-(FOCUS_RECTANGLE_DX/2)) < 0)
+                        af_roi_value.roi[0].x = 1;
+                    else
+                        af_roi_value.roi[0].x = xAf - (FOCUS_RECTANGLE_DX/2);
+
+                    if ((yAf-(FOCUS_RECTANGLE_DY/2)) < 0)
+                        af_roi_value.roi[0].y = 1;
+                    else
+                        af_roi_value.roi[0].y = yAf - (FOCUS_RECTANGLE_DY/2);
+
+                    af_roi_value.roi[0].dx = FOCUS_RECTANGLE_DX;
+                    af_roi_value.roi[0].dy = FOCUS_RECTANGLE_DY;
+                    af_roi_value.is_multiwindow = mMultiTouch;
+                    native_set_parms(CAMERA_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)&aec_roi_value);
+                    native_set_parms(CAMERA_PARM_AF_ROI, sizeof(roi_info_t), (void*)&af_roi_value);
+                }
+                else if(value == false) {
+                    //Set Touch AEC params
+                    aec_roi_value.aec_roi_enable = AEC_ROI_OFF;
+                    aec_roi_value.aec_roi_type = AEC_ROI_BY_COORDINATE;
+                    aec_roi_value.aec_roi_position.coordinate.x = DONT_CARE_COORDINATE;
+                    aec_roi_value.aec_roi_position.coordinate.y = DONT_CARE_COORDINATE;
+
+                    //Set Touch AF params
+                    af_roi_value.num_roi = 0;
+                    native_set_parms(CAMERA_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)&aec_roi_value);
+                    native_set_parms(CAMERA_PARM_AF_ROI, sizeof(roi_info_t), (void*)&af_roi_value);
+                }
+                //@Punit: If the values are negative, we dont send anything to the lower layer
+            }
+            return NO_ERROR;
+        }
+        ALOGE("Invalid Touch AF/AEC value: %s", (str == NULL) ? "NULL" : str);
+        return BAD_VALUE;
+    }
+    return NO_ERROR;
+}
+
+/* Enable/disable face detection.  mFaceDetectOn is shared with the
+ * metadata path, so it is updated under mMetaDataWaitLock. */
+status_t QualcommCameraHardware::setFaceDetection(const char *str)
+{
+    if (!supportsFaceDetection()) {
+        ALOGI("Face detection is not enabled");
+        return NO_ERROR;
+    }
+    int fdValue = (str == NULL) ? NOT_FOUND :
+            attr_lookup(facedetection, sizeof(facedetection) / sizeof(str_map), str);
+    if (fdValue == NOT_FOUND) {
+        ALOGE("Invalid Face Detection value: %s", (str == NULL) ? "NULL" : str);
+        return BAD_VALUE;
+    }
+    mMetaDataWaitLock.lock();
+    mFaceDetectOn = fdValue;
+    mMetaDataWaitLock.unlock();
+    mParameters.set(QCameraParameters::KEY_FACE_DETECTION, str);
+    return NO_ERROR;
+}
+
+/* Enable/disable red-eye reduction; the native call's result is ignored,
+ * matching the driver's fire-and-forget contract for this parameter. */
+status_t QualcommCameraHardware::setRedeyeReduction(const QCameraParameters& params)
+{
+    if (!mCfgControl.mm_camera_is_supported(CAMERA_PARM_REDEYE_REDUCTION)) {
+        ALOGI("Parameter Redeye Reduction is not supported for this sensor");
+        return NO_ERROR;
+    }
+
+    const char *redeyeStr = params.get(QCameraParameters::KEY_REDEYE_REDUCTION);
+    if (redeyeStr != NULL) {
+        int redeyeValue = attr_lookup(redeye_reduction, sizeof(redeye_reduction) / sizeof(str_map), redeyeStr);
+        if (redeyeValue != NOT_FOUND) {
+            int8_t redeyeParm = (int8_t)redeyeValue;
+            ALOGI("%s: setting Redeye Reduction value of %s", __FUNCTION__, redeyeStr);
+            mParameters.set(QCameraParameters::KEY_REDEYE_REDUCTION, redeyeStr);
+
+            native_set_parms(CAMERA_PARM_REDEYE_REDUCTION, sizeof(int8_t), (void *)&redeyeParm);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Redeye Reduction value: %s", (redeyeStr == NULL) ? "NULL" : redeyeStr);
+    return BAD_VALUE;
+}
+
+/* Apply the ISO mode.  CAMERA_ISO_DEBLUR additionally toggles HJR
+ * (hand-jitter reduction) on; leaving deblur toggles HJR back off.
+ * mHJR caches the last value so HJR is only switched on transitions. */
+status_t QualcommCameraHardware::setISOValue(const QCameraParameters& params) {
+    int8_t temp_hjr;
+    if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_ISO)) {
+        ALOGI("Parameter ISO Value is not supported for this sensor");
+        return NO_ERROR;
+    }
+    const char *str = params.get(QCameraParameters::KEY_ISO_MODE);
+    if (str != NULL) {
+        int value = (camera_iso_mode_type)attr_lookup(
+                iso, sizeof(iso) / sizeof(str_map), str);
+        if (value != NOT_FOUND) {
+            camera_iso_mode_type temp = (camera_iso_mode_type) value;
+            if (value == CAMERA_ISO_DEBLUR) {
+                temp_hjr = true;
+                native_set_parms(CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+                mHJR = value;
+            }
+            else {
+                // Only disable HJR when leaving deblur mode.
+                if (mHJR == CAMERA_ISO_DEBLUR) {
+                    temp_hjr = false;
+                    native_set_parms(CAMERA_PARM_HJR, sizeof(int8_t), (void*)&temp_hjr);
+                    mHJR = value;
+                }
+            }
+
+            mParameters.set(QCameraParameters::KEY_ISO_MODE, str);
+            native_set_parms(CAMERA_PARM_ISO, sizeof(camera_iso_mode_type), (void *)&temp);
+            return NO_ERROR;
+        }
+    }
+    ALOGE("Invalid Iso value: %s", (str == NULL) ? "NULL" : str);
+    return BAD_VALUE;
+}
+
+// Enable/disable automatic scene detection, which is implemented as two
+// driver controls (backlight detection and snow detection) that must stay
+// in the same state; on partial failure both are rolled back together.
+status_t QualcommCameraHardware::setSceneDetect(const QCameraParameters& params)
+{
+ bool retParm1, retParm2;
+ if (supportsSceneDetection()) {
+ if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_BL_DETECTION) && !mCfgControl.mm_camera_is_supported(CAMERA_PARM_SNOW_DETECTION)) {
+ ALOGI("Parameter Auto Scene Detection is not supported for this sensor");
+ return NO_ERROR;
+ }
+ const char *str = params.get(QCameraParameters::KEY_SCENE_DETECT);
+ if (str != NULL) {
+ int32_t value = attr_lookup(scenedetect, sizeof(scenedetect) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(QCameraParameters::KEY_SCENE_DETECT, str);
+
+ retParm1 = native_set_parms(CAMERA_PARM_BL_DETECTION, sizeof(value),
+ (void *)&value);
+
+ retParm2 = native_set_parms(CAMERA_PARM_SNOW_DETECTION, sizeof(value),
+ (void *)&value);
+
+ //All Auto Scene detection modes should be all ON or all OFF.
+ if(retParm1 == false || retParm2 == false) {
+ // Roll back: re-apply the inverted value to both controls so
+ // they end up consistent even after a partial failure.
+ value = !value;
+ retParm1 = native_set_parms(CAMERA_PARM_BL_DETECTION, sizeof(value),
+ (void *)&value);
+
+ retParm2 = native_set_parms(CAMERA_PARM_SNOW_DETECTION, sizeof(value),
+ (void *)&value);
+ }
+ return (retParm1 && retParm2) ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ ALOGE("Invalid auto scene detection value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+ }
+ return NO_ERROR;
+}
+
+// Apply the bestshot (scene) mode. When the mode is "auto", also enable the
+// two auto-scene-detection controls (backlight + snow); otherwise disable
+// them. Returns BAD_VALUE for NULL/unknown mode strings, UNKNOWN_ERROR when
+// the driver rejects the mode.
+status_t QualcommCameraHardware::setSceneMode(const QCameraParameters& params)
+{
+ if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_BESTSHOT_MODE)) {
+ ALOGI("Parameter Scenemode is not supported for this sensor");
+ return NO_ERROR;
+ }
+
+ const char *str = params.get(QCameraParameters::KEY_SCENE_MODE);
+
+ if (str != NULL) {
+ int32_t value = attr_lookup(scenemode, sizeof(scenemode) / sizeof(str_map), str);
+ int32_t asd_val;
+ if (value != NOT_FOUND) {
+ mParameters.set(QCameraParameters::KEY_SCENE_MODE, str);
+ bool ret = native_set_parms(CAMERA_PARM_BESTSHOT_MODE, sizeof(value),
+ (void *)&value);
+
+ // BUGFIX: native_set_parms() returns bool (true on success). The old
+ // check "ret == NO_ERROR" compared against 0, so the ASD update below
+ // only ran when setting the scene mode had FAILED. Test for success.
+ if (ret == true) {
+ int retParm1, retParm2;
+ /*if value is auto, set ASD on, else set ASD off*/
+ if (value == CAMERA_BESTSHOT_AUTO ) {
+ asd_val = TRUE;
+ } else {
+ asd_val = FALSE;
+ }
+
+ /*note: we need to simplify this logic by using a single ctrl as in 8960*/
+ retParm1 = native_set_parms(CAMERA_PARM_BL_DETECTION, sizeof(value),
+ (void *)&asd_val);
+ retParm2 = native_set_parms(CAMERA_PARM_SNOW_DETECTION, sizeof(value),
+ (void *)&asd_val);
+ }
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ }
+ ALOGE("Invalid scenemode value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+// Mirror the caller-supplied GPS/EXIF tags into mParameters. Each tag is set
+// when present and removed when absent, so stale values from a previous
+// capture never leak into the next EXIF header. KEY_GPS_STATUS is only ever
+// set, never removed (as written; unclear if intentional).
+status_t QualcommCameraHardware::setGpsLocation(const QCameraParameters& params)
+{
+ const char *method = params.get(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+ if (method) {
+ mParameters.set(QCameraParameters::KEY_GPS_PROCESSING_METHOD, method);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_GPS_PROCESSING_METHOD);
+ }
+
+ const char *latitude = params.get(QCameraParameters::KEY_GPS_LATITUDE);
+ if (latitude) {
+ ALOGI("latitude %s",latitude);
+ mParameters.set(QCameraParameters::KEY_GPS_LATITUDE, latitude);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_GPS_LATITUDE);
+ }
+
+ const char *latitudeRef = params.get(QCameraParameters::KEY_GPS_LATITUDE_REF);
+ if (latitudeRef) {
+ mParameters.set(QCameraParameters::KEY_GPS_LATITUDE_REF, latitudeRef);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_GPS_LATITUDE_REF);
+ }
+
+ const char *longitude = params.get(QCameraParameters::KEY_GPS_LONGITUDE);
+ if (longitude) {
+ mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE, longitude);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_GPS_LONGITUDE);
+ }
+
+ const char *longitudeRef = params.get(QCameraParameters::KEY_GPS_LONGITUDE_REF);
+ if (longitudeRef) {
+ mParameters.set(QCameraParameters::KEY_GPS_LONGITUDE_REF, longitudeRef);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_GPS_LONGITUDE_REF);
+ }
+
+ const char *altitudeRef = params.get(QCameraParameters::KEY_GPS_ALTITUDE_REF);
+ if (altitudeRef) {
+ mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE_REF, altitudeRef);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_GPS_ALTITUDE_REF);
+ }
+
+ const char *altitude = params.get(QCameraParameters::KEY_GPS_ALTITUDE);
+ if (altitude) {
+ mParameters.set(QCameraParameters::KEY_GPS_ALTITUDE, altitude);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_GPS_ALTITUDE);
+ }
+
+ const char *status = params.get(QCameraParameters::KEY_GPS_STATUS);
+ if (status) {
+ mParameters.set(QCameraParameters::KEY_GPS_STATUS, status);
+ }
+
+ const char *dateTime = params.get(QCameraParameters::KEY_EXIF_DATETIME);
+ if (dateTime) {
+ mParameters.set(QCameraParameters::KEY_EXIF_DATETIME, dateTime);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_EXIF_DATETIME);
+ }
+
+ const char *timestamp = params.get(QCameraParameters::KEY_GPS_TIMESTAMP);
+ if (timestamp) {
+ mParameters.set(QCameraParameters::KEY_GPS_TIMESTAMP, timestamp);
+ }else {
+ mParameters.remove(QCameraParameters::KEY_GPS_TIMESTAMP);
+ }
+
+ return NO_ERROR;
+
+}
+
+// Validate the JPEG rotation parameter (must be 0/90/180/270) and fold in
+// the sensor mount angle so the encoded orientation is correct for this
+// camera module. The combined value is cached in mRotation.
+status_t QualcommCameraHardware::setRotation(const QCameraParameters& params)
+{
+ status_t rc = NO_ERROR;
+ int sensor_mount_angle = HAL_cameraInfo[HAL_currentCameraId].sensor_mount_angle;
+ int rotation = params.getInt(QCameraParameters::KEY_ROTATION);
+ if (rotation != NOT_FOUND) {
+ if (rotation == 0 || rotation == 90 || rotation == 180
+ || rotation == 270) {
+ // Compensate for how the sensor is physically mounted.
+ rotation = (rotation + sensor_mount_angle)%360;
+ mParameters.set(QCameraParameters::KEY_ROTATION, rotation);
+ mRotation = rotation;
+ } else {
+ ALOGE("Invalid rotation value: %d", rotation);
+ rc = BAD_VALUE;
+ }
+ }
+ return rc;
+}
+
+// Apply the requested zoom level. Levels run 0..mMaxZoom-1 and map linearly
+// to driver zoom values via ZOOM_STEP.
+status_t QualcommCameraHardware::setZoom(const QCameraParameters& params)
+{
+ if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_ZOOM)) {
+ ALOGI("Parameter setZoom is not supported for this sensor");
+ return NO_ERROR;
+ }
+ status_t rc = NO_ERROR;
+ // No matter how many different zoom values the driver can provide, HAL
+ // provides applictations the same number of zoom levels. The maximum driver
+ // zoom value depends on sensor output (VFE input) and preview size (VFE
+ // output) because VFE can only crop and cannot upscale. If the preview size
+ // is bigger, the maximum zoom ratio is smaller. However, we want the
+ // zoom ratio of each zoom level is always the same whatever the preview
+ // size is. Ex: zoom level 1 is always 1.2x, zoom level 2 is 1.44x, etc. So,
+ // we need to have a fixed maximum zoom value and do read it from the
+ // driver.
+ static const int ZOOM_STEP = 1;
+ int32_t zoom_level = params.getInt("zoom");
+ if(zoom_level >= 0 && zoom_level <= mMaxZoom-1) {
+ mParameters.set("zoom", zoom_level);
+ int32_t zoom_value = ZOOM_STEP * zoom_level;
+ bool ret = native_set_parms(CAMERA_PARM_ZOOM,
+ sizeof(zoom_value), (void *)&zoom_value);
+ rc = ret ? NO_ERROR : UNKNOWN_ERROR;
+ } else {
+ rc = BAD_VALUE;
+ }
+
+ return rc;
+}
+
+// Apply the wavelet-denoise setting, only touching the driver when the value
+// actually changes (cached in mDenoiseValue).
+// NOTE(review): when str is non-NULL but not in the lookup table the function
+// still returns NO_ERROR — the BAD_VALUE path below is reachable only for a
+// NULL string. Possibly intentional tolerance; confirm before changing.
+status_t QualcommCameraHardware::setDenoise(const QCameraParameters& params)
+{
+ if(!mCfgControl.mm_camera_is_supported(CAMERA_PARM_WAVELET_DENOISE)) {
+ ALOGI("Wavelet Denoise is not supported for this sensor");
+ return NO_ERROR;
+ }
+ const char *str = params.get(QCameraParameters::KEY_DENOISE);
+ if (str != NULL) {
+ int value = attr_lookup(denoise,
+ sizeof(denoise) / sizeof(str_map), str);
+ if ((value != NOT_FOUND) && (mDenoiseValue != value)) {
+ mDenoiseValue = value;
+ mParameters.set(QCameraParameters::KEY_DENOISE, str);
+ bool ret = native_set_parms(CAMERA_PARM_WAVELET_DENOISE, sizeof(value),
+ (void *)&value);
+ return ret ? NO_ERROR : UNKNOWN_ERROR;
+ }
+ return NO_ERROR;
+ }
+ ALOGE("Invalid Denoise value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+}
+
+// Track the ZSL "continuous temporal bracketing" (panorama) flag so a
+// snapshot taken for a continuous viewfinder does not restart preview.
+// mZslPanorama defaults to false whenever the key is absent.
+status_t QualcommCameraHardware::setZslParam(const QCameraParameters& params)
+{
+ if(!mZslEnable) {
+ ALOGV("Zsl is not enabled");
+ return NO_ERROR;
+ }
+ /* This ensures that restart of Preview doesnt happen when taking
+ * Snapshot for continuous viewfinder */
+ const char *str = params.get("continuous-temporal-bracketing");
+ if(str !=NULL) {
+ // NOTE(review): n=8 exceeds the 7 bytes of "enable" (incl. NUL), so this
+ // strncmp behaves exactly like strcmp — harmless but worth tidying.
+ if(!strncmp(str, "enable", 8))
+ mZslPanorama = true;
+ else
+ mZslPanorama = false;
+ return NO_ERROR;
+ }
+ mZslPanorama = false;
+ return NO_ERROR;
+
+}
+
+// Decide how many snapshots to take per shutter press. In ZSL mode the app
+// may request a count via "num-snaps-per-shutter"; otherwise the existing
+// numCapture value is kept. The result is clamped to
+// [1, MAX_SNAPSHOT_BUFFERS-2] and written back into mParameters.
+status_t QualcommCameraHardware::setSnapshotCount(const QCameraParameters& params)
+{
+ int value;
+ char snapshotCount[5];
+ if(!mZslEnable){
+ value = numCapture;
+ } else {
+ /* ZSL case: Get value from App */
+ const char *str = params.get("num-snaps-per-shutter");
+ if (str != NULL) {
+ value = atoi(str);
+ } else
+ value = 1;
+ }
+ /* Sanity check */
+ if(value > MAX_SNAPSHOT_BUFFERS -2)
+ value = MAX_SNAPSHOT_BUFFERS -2;
+ else if(value < 1)
+ value = 1;
+ snprintf(snapshotCount, sizeof(snapshotCount),"%d",value);
+ numCapture = value;
+ mParameters.set("num-snaps-per-shutter", snapshotCount);
+ ALOGI("%s setting num-snaps-per-shutter to %s", __FUNCTION__, snapshotCount);
+ return NO_ERROR;
+
+}
+
+// Query the driver's three focus distances and publish them as the
+// comma-separated KEY_FOCUS_DISTANCES string "near,optimal,far". For the
+// infinity focus mode the far distance is reported as the literal "Infinity".
+status_t QualcommCameraHardware::updateFocusDistances(const char *focusmode)
+{
+ ALOGV("%s: IN", __FUNCTION__);
+ focus_distances_info_t focusDistances;
+ if( mCfgControl.mm_camera_get_parm(CAMERA_PARM_FOCUS_DISTANCES,
+ (void *)&focusDistances) == MM_CAMERA_SUCCESS) {
+ String8 str;
+ char buffer[32];
+ snprintf(buffer, sizeof(buffer), "%f", focusDistances.focus_distance[0]);
+ str.append(buffer);
+ snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[1]);
+ str.append(buffer);
+ if(strcmp(focusmode, QCameraParameters::FOCUS_MODE_INFINITY) == 0)
+ snprintf(buffer, sizeof(buffer), ",%s", "Infinity");
+ else
+ snprintf(buffer, sizeof(buffer), ",%f", focusDistances.focus_distance[2]);
+ str.append(buffer);
+ ALOGI("%s: setting KEY_FOCUS_DISTANCES as %s", __FUNCTION__, str.string());
+ mParameters.set(QCameraParameters::KEY_FOCUS_DISTANCES, str.string());
+ return NO_ERROR;
+ }
+ ALOGE("%s: get CAMERA_PARM_FOCUS_DISTANCES failed!!!", __FUNCTION__);
+ return BAD_VALUE;
+}
+
+// Validate and store the metering-areas string. The two "default" area
+// strings clear the key; anything else must pass checkAreaParameters().
+status_t QualcommCameraHardware::setMeteringAreas(const QCameraParameters& params)
+{
+ const char *str = params.get(QCameraParameters::KEY_METERING_AREAS);
+ if (str == NULL || (strcmp(str, "0") == 0)) {
+ ALOGE("%s: Parameter string is null", __FUNCTION__);
+ }
+ else {
+ // handling default string
+ if ((strcmp("(-2000,-2000,-2000,-2000,0)", str) == 0) ||
+ (strcmp("(0,0,0,0,0)", str) == 0)){
+ mParameters.set(QCameraParameters::KEY_METERING_AREAS, NULL);
+ return NO_ERROR;
+ }
+ if(checkAreaParameters(str) != 0) {
+ ALOGE("%s: Failed to parse the input string '%s'", __FUNCTION__, str);
+ return BAD_VALUE;
+ }
+ mParameters.set(QCameraParameters::KEY_METERING_AREAS, str);
+ }
+
+ return NO_ERROR;
+}
+
+// Validate and store the focus-areas string; mirrors setMeteringAreas():
+// default area strings clear the key, otherwise checkAreaParameters() must
+// accept the string before it is stored.
+status_t QualcommCameraHardware::setFocusAreas(const QCameraParameters& params)
+{
+ const char *str = params.get(QCameraParameters::KEY_FOCUS_AREAS);
+
+ if (str == NULL || (strcmp(str, "0") == 0)) {
+ ALOGE("%s: Parameter string is null", __FUNCTION__);
+ }
+ else {
+ // handling default string
+ if ((strcmp("(-2000,-2000,-2000,-2000,0)", str) == 0) ||
+ (strcmp("(0,0,0,0,0)", str) == 0)) {
+ mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, NULL);
+ return NO_ERROR;
+ }
+
+ if(checkAreaParameters(str) != 0) {
+ ALOGE("%s: Failed to parse the input string '%s'", __FUNCTION__, str);
+ return BAD_VALUE;
+ }
+
+ mParameters.set(QCameraParameters::KEY_FOCUS_AREAS, str);
+ }
+
+ return NO_ERROR;
+}
+// Apply the focus mode. When autofocus is supported this also refreshes the
+// published focus distances and tells the driver whether continuous AF
+// should be active (for the two continuous focus modes).
+status_t QualcommCameraHardware::setFocusMode(const QCameraParameters& params)
+{
+ const char *str = params.get(QCameraParameters::KEY_FOCUS_MODE);
+ if (str != NULL) {
+ ALOGI("FocusMode =%s", str);
+ int32_t value = attr_lookup(focus_modes,
+ sizeof(focus_modes) / sizeof(str_map), str);
+ if (value != NOT_FOUND) {
+ mParameters.set(QCameraParameters::KEY_FOCUS_MODE, str);
+
+ if(mHasAutoFocusSupport && (updateFocusDistances(str) != NO_ERROR)) {
+ ALOGE("%s: updateFocusDistances failed for %s", __FUNCTION__, str);
+ return UNKNOWN_ERROR;
+ }
+
+ if(mHasAutoFocusSupport){
+ // Continuous AF is on only for continuous-video / continuous-picture.
+ int cafSupport = FALSE;
+ if(!strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_VIDEO) ||
+ !strcmp(str, QCameraParameters::FOCUS_MODE_CONTINUOUS_PICTURE)){
+ cafSupport = TRUE;
+ }
+ ALOGV("Continuous Auto Focus %d", cafSupport);
+ native_set_parms(CAMERA_PARM_CONTINUOUS_AF, sizeof(int8_t), (void *)&cafSupport);
+ }
+ // Focus step is reset to infinity when preview is started. We do
+ // not need to do anything now.
+ return NO_ERROR;
+ }
+ }
+ ALOGE("Invalid focus mode value: %s", (str == NULL) ? "NULL" : str);
+ return BAD_VALUE;
+
+}
+// Memory pool wrapper around a gralloc-provided fd. The mapping logic is
+// currently compiled out (#if 0), so this only stores the fd and forwards
+// the sizing parameters to the MemPool base class.
+QualcommCameraHardware::DispMemPool::DispMemPool(int fd, int buffer_size,
+ int num_buffers, int frame_size,
+ const char *name) :
+ QualcommCameraHardware::MemPool(buffer_size,
+ num_buffers,
+ frame_size,
+ name),
+ mFD(fd)
+{
+#if 0
+ ALOGV("constructing MemPool %s from gralloc memory: "
+ "%d frames @ %d size "
+ "buffer size %d",
+ mName,
+ num_buffers, frame_size, buffer_size);
+ /* Use the fd given by gralloc and ask MemoryHeapBase to map it
+ * in this process space */
+ mHeap = new MemoryHeapBase(mFD, buffer_size, MemoryHeapBase::NO_CACHING, 0);
+ completeInitialization();
+#endif
+}
+
+// The fd is owned by gralloc, so there is nothing to release here; just
+// invalidate the cached descriptor.
+QualcommCameraHardware::DispMemPool::~DispMemPool()
+{
+ /* Not much to do in destructor for now */
+ ALOGV(" ~DispMemPool : E ");
+ mFD = -1;
+ ALOGV(" ~DispMemPool : X ");
+}
+// Record the app-requested UI orientation ("portrait"/"landscape") so the
+// camera service can decide whether preview frames and raw pictures need to
+// be rotated. Any other string is rejected.
+status_t QualcommCameraHardware::setOrientation(const QCameraParameters& params)
+{
+ const char *str = params.get("orientation");
+
+ if (str != NULL) {
+ if (strcmp(str, "portrait") == 0 || strcmp(str, "landscape") == 0) {
+ // Camera service needs this to decide if the preview frames and raw
+ // pictures should be rotated.
+ mParameters.set("orientation", str);
+ } else {
+ ALOGE("Invalid orientation value: %s", str);
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+// Validate the requested picture format against the supported table and
+// store it; unknown formats are rejected with BAD_VALUE.
+status_t QualcommCameraHardware::setPictureFormat(const QCameraParameters& params)
+{
+ const char * str = params.get(QCameraParameters::KEY_PICTURE_FORMAT);
+
+ if(str != NULL){
+ int32_t value = attr_lookup(picture_formats,
+ sizeof(picture_formats) / sizeof(str_map), str);
+ if(value != NOT_FOUND){
+ mParameters.set(QCameraParameters::KEY_PICTURE_FORMAT, str);
+ } else {
+ ALOGE("Invalid Picture Format value: %s", str);
+ return BAD_VALUE;
+ }
+ }
+ return NO_ERROR;
+}
+
+// Load the OEM camera library when the HAL is built to dlopen it; otherwise
+// libmmcamera is linked directly and the handle stays NULL.
+// NOTE(review): the log says "libeomcamera" but the library opened is
+// "liboemcamera.so" — typo in the log text only.
+QualcommCameraHardware::MMCameraDL::MMCameraDL(){
+ ALOGV("MMCameraDL: E");
+ libmmcamera = NULL;
+#if DLOPEN_LIBMMCAMERA
+ libmmcamera = ::dlopen("liboemcamera.so", RTLD_NOW);
+#endif
+ ALOGV("Open MM camera DL libeomcamera loaded at %p ", libmmcamera);
+ ALOGV("MMCameraDL: X");
+}
+
+// Raw dlopen handle accessor (NULL when the library is statically linked).
+void * QualcommCameraHardware::MMCameraDL::pointer(){
+ return libmmcamera;
+}
+
+// Tear down the mm-camera stack before dropping the library handle so no
+// library code runs after dlclose().
+QualcommCameraHardware::MMCameraDL::~MMCameraDL(){
+ ALOGV("~MMCameraDL: E");
+ LINK_mm_camera_destroy();
+ if (libmmcamera != NULL) {
+ ::dlclose(libmmcamera);
+ ALOGV("closed MM Camera DL ");
+ }
+ libmmcamera = NULL;
+ ALOGV("~MMCameraDL: X");
+}
+
+// Weak singleton handle + its guard lock; the weak pointer lets the library
+// unload automatically once the last strong reference is gone.
+wp<QualcommCameraHardware::MMCameraDL> QualcommCameraHardware::MMCameraDL::instance;
+Mutex QualcommCameraHardware::MMCameraDL::singletonLock;
+
+
+// Lazily create (or revive) the MMCameraDL singleton under singletonLock.
+// promote() fails once all strong refs are gone, triggering a fresh load.
+sp<QualcommCameraHardware::MMCameraDL> QualcommCameraHardware::MMCameraDL::getInstance(){
+ Mutex::Autolock instanceLock(singletonLock);
+ sp<MMCameraDL> mmCamera = instance.promote();
+ if(mmCamera == NULL){
+ mmCamera = new MMCameraDL();
+ instance = mmCamera;
+ }
+ return mmCamera;
+}
+
+// Base memory pool: records the geometry and rounds the per-buffer size up
+// to a whole number of pages (mAlignedBufferSize). Buffers themselves are
+// created later by completeInitialization().
+QualcommCameraHardware::MemPool::MemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name) :
+ mBufferSize(buffer_size),
+ mNumBuffers(num_buffers),
+ mFrameSize(frame_size),
+ mBuffers(NULL), mName(name)
+{
+ int page_size_minus_1 = getpagesize() - 1;
+ mAlignedBufferSize = (buffer_size + page_size_minus_1) & (~page_size_minus_1);
+}
+
+// Carve mHeap into mNumBuffers page-aligned MemoryBase slices, one per
+// frame. Deferred until the frame size is known (mFrameSize > 0).
+void QualcommCameraHardware::MemPool::completeInitialization()
+{
+ // If we do not know how big the frame will be, we wait to allocate
+ // the buffers describing the individual frames until we do know their
+ // size.
+
+ if (mFrameSize > 0) {
+ ALOGI("Before new Mem BASE #buffers :%d",mNumBuffers);
+ mBuffers = new sp<MemoryBase>[mNumBuffers];
+ for (int i = 0; i < mNumBuffers; i++) {
+ mBuffers[i] = new
+ MemoryBase(mHeap,
+ i * mAlignedBufferSize,
+ mFrameSize);
+ }
+ }
+}
+
+// Memory pool backed by an anonymous shared-memory heap. The total heap size
+// is rounded up to a page boundary before allocation.
+QualcommCameraHardware::AshmemPool::AshmemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name) :
+ QualcommCameraHardware::MemPool(buffer_size,
+ num_buffers,
+ frame_size,
+ name)
+{
+ ALOGV("constructing MemPool %s backed by ashmem: "
+ "%d frames @ %d uint8_ts, "
+ "buffer size %d",
+ mName,
+ num_buffers, frame_size, buffer_size);
+
+ // Round the total allocation up to a whole number of pages.
+ int page_mask = getpagesize() - 1;
+ int ashmem_size = buffer_size * num_buffers;
+ ashmem_size += page_mask;
+ ashmem_size &= ~page_mask;
+
+ mHeap = new MemoryHeapBase(ashmem_size);
+
+ completeInitialization();
+}
+
+// (Un)register the video record buffers with the camera driver. The entire
+// body is currently compiled out (#if 0), so this always returns true; kept
++// for reference until the pmem registration path is revived.
+bool QualcommCameraHardware::register_record_buffers(bool register_buffer) {
+ ALOGI("%s: (%d) E", __FUNCTION__, register_buffer);
+ struct msm_pmem_info pmemBuf;
+#if 0
+ for (int cnt = 0; cnt < kRecordBufferCount; ++cnt) {
+ pmemBuf.type = MSM_PMEM_VIDEO;
+ pmemBuf.fd = mRecordHeap->mHeap->getHeapID();
+ pmemBuf.offset = mRecordHeap->mAlignedBufferSize * cnt;
+ pmemBuf.len = mRecordHeap->mBufferSize;
+ pmemBuf.vaddr = (uint8_t *)mRecordHeap->mHeap->base() + mRecordHeap->mAlignedBufferSize * cnt;
+ pmemBuf.planar0_off = 0;
+ pmemBuf.planar1_off = recordframes[0].planar1_off;
+ pmemBuf.planar2_off = 0;
+ if(register_buffer == true) {
+ pmemBuf.active = (cnt<ACTIVE_VIDEO_BUFFERS);
+ if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+ pmemBuf.type = MSM_PMEM_VIDEO_VPE;
+ pmemBuf.active = 1;
+ }
+ } else {
+ pmemBuf.active = false;
+ }
+
+ ALOGV("register_buf: reg = %d buffer = %p", !register_buffer,
+ (void *)pmemBuf.vaddr);
+ if(native_start_ops(register_buffer ? CAMERA_OPS_REGISTER_BUFFER :
+ CAMERA_OPS_UNREGISTER_BUFFER ,(void *)&pmemBuf) < 0) {
+ ALOGE("register_buf: MSM_CAM_IOCTL_(UN)REGISTER_PMEM error %s",
+ strerror(errno));
+ return false;
+ }
+ }
+#endif
+ return true;
+}
+
+// Memory pool backed by a pmem device. Maps a master heap of
+// num_buffers * aligned-buffer-size bytes, wraps it in a MemoryHeapPmem,
+// then registers every buffer (except for "postview" pools) with the camera
+// driver via register_buf(). Registration logic is intricate: active-buffer
+// counts differ per pmem type, and with VPE enabled the last record buffer
+// is registered as MSM_PMEM_VIDEO_VPE.
+QualcommCameraHardware::PmemPool::PmemPool(const char *pmem_pool,
+ int flags,
+ int pmem_type,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yOffset, const char *name) :
+ QualcommCameraHardware::MemPool(buffer_size,
+ num_buffers,
+ frame_size,
+ name),
+ mPmemType(pmem_type),
+ mCbCrOffset(cbcr_offset),
+ myOffset(yOffset)
+{
+ bool all_chnls = false;
+ ALOGI("constructing MemPool %s backed by pmem pool %s: "
+ "%d frames @ %d bytes, buffer size %d",
+ mName,
+ pmem_pool, num_buffers, frame_size,
+ buffer_size);
+
+ // Hold the DL singleton so liboemcamera stays loaded while buffers are
+ // registered with the driver.
+ mMMCameraDLRef = QualcommCameraHardware::MMCameraDL::getInstance();
+
+
+ // Make a new mmap'ed heap that can be shared across processes.
+ // mAlignedBufferSize is already in 4k aligned. (do we need total size necessary to be in power of 2??)
+ mAlignedSize = mAlignedBufferSize * num_buffers;
+
+ sp<MemoryHeapBase> masterHeap =
+ new MemoryHeapBase(pmem_pool, mAlignedSize, flags);
+
+ if (masterHeap->getHeapID() < 0) {
+ ALOGE("failed to construct master heap for pmem pool %s", pmem_pool);
+ masterHeap.clear();
+ return;
+ }
+
+ sp<MemoryHeapPmem> pmemHeap = new MemoryHeapPmem(masterHeap, flags);
+ if (pmemHeap->getHeapID() >= 0) {
+ pmemHeap->slap();
+ masterHeap.clear();
+ mHeap = pmemHeap;
+ pmemHeap.clear();
+
+ mFd = mHeap->getHeapID();
+ if (::ioctl(mFd, PMEM_GET_SIZE, &mSize)) {
+ ALOGE("pmem pool %s ioctl(PMEM_GET_SIZE) error %s (%d)",
+ pmem_pool,
+ ::strerror(errno), errno);
+ mHeap.clear();
+ return;
+ }
+
+ ALOGV("pmem pool %s ioctl(fd = %d, PMEM_GET_SIZE) is %ld",
+ pmem_pool,
+ mFd,
+ mSize.len);
+ ALOGD("mBufferSize=%d, mAlignedBufferSize=%d\n", mBufferSize, mAlignedBufferSize);
+ // Unregister preview buffers with the camera drivers. Allow the VFE to write
+ // to all preview buffers except for the last one.
+ // Only Register the preview, snapshot and thumbnail buffers with the kernel.
+ if( (strcmp("postview", mName) != 0) ){
+ int num_buf = num_buffers;
+ if(!strcmp("preview", mName)) num_buf = kTotalPreviewBufferCount;
+ ALOGD("num_buffers = %d", num_buf);
+ for (int cnt = 0; cnt < num_buf; ++cnt) {
+ int active = 1;
+ if(pmem_type == MSM_PMEM_VIDEO){
+ active = (cnt<ACTIVE_VIDEO_BUFFERS);
+ //When VPE is enabled, set the last record
+ //buffer as active and pmem type as PMEM_VIDEO_VPE
+ //as this is a requirement from VPE operation.
+ //No need to set this pmem type to VIDEO_VPE while unregistering,
+ //because as per camera stack design: "the VPE AXI is also configured
+ //when VFE is configured for VIDEO, which is as part of preview
+ //initialization/start. So during this VPE AXI config camera stack
+ //will lookup the PMEM_VIDEO_VPE buffer and give it as o/p of VPE and
+ //change it's type to PMEM_VIDEO".
+ if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+ active = 1;
+ pmem_type = MSM_PMEM_VIDEO_VPE;
+ }
+ ALOGV(" pmempool creating video buffers : active %d ", active);
+ }
+ else if (pmem_type == MSM_PMEM_PREVIEW){
+ active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+ }
+ else if ((pmem_type == MSM_PMEM_MAINIMG)
+ || (pmem_type == MSM_PMEM_THUMBNAIL)){
+ active = (cnt < ACTIVE_ZSL_BUFFERS);
+ }
+ // YV12 preview needs all three channels registered separately
+ // (except on 7627A).
+ if (pmem_type == MSM_PMEM_PREVIEW &&
+ mPreviewFormat == CAMERA_YUV_420_YV12 && mCurrentTarget != TARGET_MSM7627A)
+ all_chnls = true;
+
+ register_buf(mBufferSize,
+ mFrameSize, mCbCrOffset, myOffset,
+ mHeap->getHeapID(),
+ mAlignedBufferSize * cnt,
+ (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+ pmem_type,
+ active,true,
+ all_chnls);
+ }
+ }
+
+ completeInitialization();
+ }
+ else ALOGE("pmem pool %s error: could not create master heap!",
+ pmem_pool);
+ ALOGV("%s: (%s) X ", __FUNCTION__, mName);
+}
+
+// Unregister every buffer this pool registered in the constructor (again
+// skipping "postview" pools), then drop the DL reference so liboemcamera
+// may unload.
+QualcommCameraHardware::PmemPool::~PmemPool()
+{
+ ALOGI("%s: %s E", __FUNCTION__, mName);
+ if (mHeap != NULL) {
+ // Unregister preview buffers with the camera drivers.
+ // Only Unregister the preview, snapshot and thumbnail
+ // buffers with the kernel.
+ if( (strcmp("postview", mName) != 0) ){
+ int num_buffers = mNumBuffers;
+ if(!strcmp("preview", mName)) num_buffers = kTotalPreviewBufferCount;
+ for (int cnt = 0; cnt < num_buffers; ++cnt) {
+ register_buf(mBufferSize,
+ mFrameSize,
+ mCbCrOffset,
+ myOffset,
+ mHeap->getHeapID(),
+ mAlignedBufferSize * cnt,
+ (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+ mPmemType,
+ false,
+ false,/* unregister */
+ false);
+ }
+ }
+ }
+ mMMCameraDLRef.clear();
+ ALOGI("%s: %s X", __FUNCTION__, mName);
+}
+#if 0
+#ifdef USE_ION
+const char QualcommCameraHardware::IonPool::mIonDevName[] = "/dev/ion";
+QualcommCameraHardware::IonPool::IonPool(int ion_heap_id, int flags,
+ int ion_type,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yOffset, const char *name) :
+ QualcommCameraHardware::MemPool(buffer_size,
+ num_buffers,
+ frame_size,
+ name),
+ mIonType(ion_type),
+ mCbCrOffset(cbcr_offset),
+ myOffset(yOffset)
+{
+ ALOGI("constructing MemPool %s backed by pmem pool %s: "
+ "%d frames @ %d bytes, buffer size %d",
+ mName,
+ mIonDevName, num_buffers, frame_size,
+ buffer_size);
+
+ mMMCameraDLRef = QualcommCameraHardware::MMCameraDL::getInstance();
+
+
+ // Make a new mmap'ed heap that can be shared across processes.
+ // mAlignedBufferSize is already in 4k aligned. (do we need total size necessary to be in power of 2??)
+ mAlignedSize = mAlignedBufferSize * num_buffers;
+ sp<MemoryHeapIon> ionHeap = new MemoryHeapIon(mIonDevName, mAlignedSize,
+ flags, 0x1<<ion_heap_id);
+ if (ionHeap->getHeapID() >= 0) {
+ mHeap = ionHeap;
+ ionHeap.clear();
+
+ mFd = mHeap->getHeapID();
+ ALOGE("ion pool %s fd = %d", mIonDevName, mFd);
+ ALOGE("mBufferSize=%d, mAlignedBufferSize=%d\n",
+ mBufferSize, mAlignedBufferSize);
+
+ // Unregister preview buffers with the camera drivers. Allow the VFE to write
+ // to all preview buffers except for the last one.
+ // Only Register the preview, snapshot and thumbnail buffers with the kernel.
+ if( (strcmp("postview", mName) != 0) ){
+ int num_buf = num_buffers;
+ if(!strcmp("preview", mName)) num_buf = kPreviewBufferCount;
+ ALOGD("num_buffers = %d", num_buf);
+ for (int cnt = 0; cnt < num_buf; ++cnt) {
+ int active = 1;
+ if(ion_type == MSM_PMEM_VIDEO){
+ active = (cnt<ACTIVE_VIDEO_BUFFERS);
+ //When VPE is enabled, set the last record
+ //buffer as active and pmem type as PMEM_VIDEO_VPE
+ //as this is a requirement from VPE operation.
+ //No need to set this pmem type to VIDEO_VPE while unregistering,
+ //because as per camera stack design: "the VPE AXI is also configured
+ //when VFE is configured for VIDEO, which is as part of preview
+ //initialization/start. So during this VPE AXI config camera stack
+ //will lookup the PMEM_VIDEO_VPE buffer and give it as o/p of VPE and
+ //change it's type to PMEM_VIDEO".
+ if( (mVpeEnabled) && (cnt == kRecordBufferCount-1)) {
+ active = 1;
+ ion_type = MSM_PMEM_VIDEO_VPE;
+ }
+ ALOGV(" pmempool creating video buffers : active %d ", active);
+ }
+ else if (ion_type == MSM_PMEM_PREVIEW){
+ active = (cnt < ACTIVE_PREVIEW_BUFFERS);
+ }
+ else if ((ion_type == MSM_PMEM_MAINIMG)
+ || (ion_type == MSM_PMEM_THUMBNAIL)){
+ active = (cnt < ACTIVE_ZSL_BUFFERS);
+ }
+ register_buf(mBufferSize,
+ mFrameSize, mCbCrOffset, myOffset,
+ mHeap->getHeapID(),
+ mAlignedBufferSize * cnt,
+ (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+ ion_type,
+ active);
+ }
+ }
+
+ completeInitialization();
+ }
+ else ALOGE("pmem pool %s error: could not create master heap!",
+ mIonDevName);
+ ALOGI("%s: (%s) X ", __FUNCTION__, mName);
+}
+
+QualcommCameraHardware::IonPool::~IonPool()
+{
+ ALOGI("%s: %s E", __FUNCTION__, mName);
+ if (mHeap != NULL) {
+ // Unregister preview buffers with the camera drivers.
+ // Only Unregister the preview, snapshot and thumbnail
+ // buffers with the kernel.
+ if( (strcmp("postview", mName) != 0) ){
+ int num_buffers = mNumBuffers;
+ if(!strcmp("preview", mName)) num_buffers = kPreviewBufferCount;
+ for (int cnt = 0; cnt < num_buffers; ++cnt) {
+ register_buf(mBufferSize,
+ mFrameSize,
+ mCbCrOffset,
+ myOffset,
+ mHeap->getHeapID(),
+ mAlignedBufferSize * cnt,
+ (uint8_t *)mHeap->base() + mAlignedBufferSize * cnt,
+ mIonType,
+ false,
+ false /* unregister */);
+ }
+ }
+ }
+ mMMCameraDLRef.clear();
+ ALOGI("%s: %s X", __FUNCTION__, mName);
+}
+#endif
+#endif
+// Release the per-frame MemoryBase array (only allocated when mFrameSize > 0
+// — see completeInitialization) and drop the heap reference.
+QualcommCameraHardware::MemPool::~MemPool()
+{
+ ALOGV("destroying MemPool %s", mName);
+ if (mFrameSize > 0)
+ delete [] mBuffers;
+ mHeap.clear();
+ ALOGV("destroying MemPool %s completed", mName);
+}
+
+// Write a human-readable description of the pool (name, heap info, buffer
+// geometry) to fd, for dumpsys-style debugging. args is unused.
+status_t QualcommCameraHardware::MemPool::dump(int fd, const Vector<String16>& args) const
+{
+ const size_t SIZE = 256;
+ char buffer[SIZE];
+ String8 result;
+ CAMERA_HAL_UNUSED(args);
+ snprintf(buffer, 255, "QualcommCameraHardware::AshmemPool::dump\n");
+ result.append(buffer);
+ if (mName) {
+ snprintf(buffer, 255, "mem pool name (%s)\n", mName);
+ result.append(buffer);
+ }
+ if (mHeap != 0) {
+ snprintf(buffer, 255, "heap base(%p), size(%d), flags(%d), device(%s)\n",
+ mHeap->getBase(), mHeap->getSize(),
+ mHeap->getFlags(), mHeap->getDevice());
+ result.append(buffer);
+ }
+ snprintf(buffer, 255,
+ "buffer size (%d), number of buffers (%d), frame size(%d)",
+ mBufferSize, mNumBuffers, mFrameSize);
+ result.append(buffer);
+ write(fd, result.string(), result.size());
+ return NO_ERROR;
+}
+
+// C callback from the frame thread: forward a preview frame to the HAL
+// singleton (dropped if the instance is gone).
+static void receive_camframe_callback(struct msm_frame *frame)
+{
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->receivePreviewFrame(frame);
+ }
+}
+
+// C callback: forward preview histogram statistics to the HAL singleton.
+static void receive_camstats_callback(camstats_type stype, camera_preview_histogram_info* histinfo)
+{
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->receiveCameraStats(stype,histinfo);
+ }
+}
+
+// C callback: on a successful liveshot, hand the encoded JPEG size to the
+// HAL singleton; failures are only logged.
+static void receive_liveshot_callback(liveshot_status status, uint32_t jpeg_size)
+{
+ if(status == LIVESHOT_SUCCESS) {
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->receiveLiveSnapshot(jpeg_size);
+ }
+ }
+ else
+ ALOGE("Liveshot not succesful");
+}
+
+
+// Dispatch an asynchronous mm-camera event to the HAL singleton. Snapshot
+// events forward the postview/raw frames; JPEG events forward the encoded
+// frame (or an error). Returns FALSE only for a NULL event, TRUE otherwise.
+static int8_t receive_event_callback(mm_camera_event* event)
+{
+ ALOGV("%s: E", __FUNCTION__);
+ if(event == NULL) {
+ ALOGE("%s: event is NULL!", __FUNCTION__);
+ return FALSE;
+ }
+ switch(event->event_type) {
+ case SNAPSHOT_DONE:
+ {
+ /* postview buffer is received */
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+
+ obj->receiveRawPicture(NO_ERROR, event->event_data.yuv_frames[0], event->event_data.yuv_frames[0]);
+ }
+ }
+ break;
+ case SNAPSHOT_FAILED:
+ {
+ /* postview buffer is received */
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+
+ obj->receiveRawPicture(UNKNOWN_ERROR, NULL, NULL);
+ }
+ }
+ break;
+ case JPEG_ENC_DONE:
+ {
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->receiveJpegPicture(NO_ERROR, event->event_data.encoded_frame);
+ }
+ }
+ break;
+ case JPEG_ENC_FAILED:
+ {
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->receiveJpegPicture(UNKNOWN_ERROR, 0);
+ }
+ }
+ break;
+ default:
+ ALOGE("%s: ignore default case", __FUNCTION__);
+ }
+ // BUGFIX: this exit log used to sit after "return TRUE;" and was
+ // unreachable dead code; log before returning.
+ ALOGV("%s: X", __FUNCTION__);
+ return TRUE;
+}
+// 720p : video frame calbback from camframe
+static void receive_camframe_video_callback(struct msm_frame *frame)
+{
+ ALOGV("receive_camframe_video_callback E");
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ obj->receiveRecordingFrame(frame);
+ }
+ ALOGV("receive_camframe_video_callback X");
+}
+
+
+// Record whether the stagefright recorder wants metadata-in-buffers mode.
+// Currently only caches the flag (stub — see comment); always succeeds.
+int QualcommCameraHardware::storeMetaDataInBuffers(int enable)
+{
+ /* this is a dummy func now. fix me later */
+ ALOGI("in storeMetaDataInBuffers : enable %d", enable);
+ mStoreMetaDataInFrame = enable;
+ return 0;
+}
+
+// Store the camera-service callback set (notify, data, timestamped data,
+// memory allocator, cookie) under mLock for later asynchronous delivery.
+void QualcommCameraHardware::setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void* user)
+{
+ Mutex::Autolock lock(mLock);
+ mNotifyCallback = notify_cb;
+ mDataCallback = data_cb;
+ mDataCallbackTimestamp = data_cb_timestamp;
+ mGetMemory = get_memory;
+ mCallbackCookie = user;
+}
+// Report the fixed number of video record buffers this HAL allocates.
+int32_t QualcommCameraHardware::getNumberOfVideoBuffers() {
+ ALOGI("getNumOfVideoBuffers: %d", kRecordBufferCount);
+ return kRecordBufferCount;
+}
+
+// Stubbed accessor for a record buffer: both branches currently return NULL
+// (the real lookup is compiled out below).
+// NOTE(review): the bounds check "index > kRecordBufferCount" looks like it
+// should be ">=" (valid indices are 0..count-1); harmless while both paths
+// return NULL, but fix before re-enabling the #if 0 code.
+sp<IMemory> QualcommCameraHardware::getVideoBuffer(int32_t index) {
+ if(index > kRecordBufferCount)
+ return NULL;
+ else
+ return NULL;
+#if 0
+ return mRecordHeap->mBuffers[index];
+#endif
+}
+// Enable a message-type bit. On targets without a separate video path
+// (not 7630/8250/8660), enabling CAMERA_MSG_VIDEO_FRAME also starts the
+// driver's recording stream.
+void QualcommCameraHardware::enableMsgType(int32_t msgType)
+{
+ Mutex::Autolock lock(mLock);
+ mMsgEnabled |= msgType;
+ if( (mCurrentTarget != TARGET_MSM7630 ) && (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660)) {
+ if(mMsgEnabled & CAMERA_MSG_VIDEO_FRAME){
+ native_start_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+ mRecordingState = 1;
+ }
+ }
+}
+
+// Disable a message-type bit; mirror of enableMsgType(). The recording
+// stream is stopped BEFORE the bit is cleared, so the check below still sees
+// CAMERA_MSG_VIDEO_FRAME set when it is the bit being disabled.
+void QualcommCameraHardware::disableMsgType(int32_t msgType)
+{
+ Mutex::Autolock lock(mLock);
+ if( (mCurrentTarget != TARGET_MSM7630 ) && (mCurrentTarget != TARGET_QSD8250) && (mCurrentTarget != TARGET_MSM8660)) {
+ if(mMsgEnabled & CAMERA_MSG_VIDEO_FRAME){
+ native_stop_ops(CAMERA_OPS_VIDEO_RECORDING, NULL);
+ mRecordingState = 0;
+ }
+ }
+ mMsgEnabled &= ~msgType;
+}
+
+bool QualcommCameraHardware::msgTypeEnabled(int32_t msgType)
+{
+ return (mMsgEnabled & msgType);
+}
+
+
+void QualcommCameraHardware::receive_camframe_error_timeout(void) {
+ ALOGI("receive_camframe_error_timeout: E");
+ Mutex::Autolock l(&mCamframeTimeoutLock);
+ ALOGE(" Camframe timed out. Not receiving any frames from camera driver ");
+ camframe_timeout_flag = TRUE;
+ mNotifyCallback(CAMERA_MSG_ERROR, CAMERA_ERROR_UNKNOWN, 0,
+ mCallbackCookie);
+ ALOGI("receive_camframe_error_timeout: X");
+}
+
+static void receive_camframe_error_callback(camera_error_type err) {
+ QualcommCameraHardware* obj = QualcommCameraHardware::getInstance();
+ if (obj != 0) {
+ if ((err == CAMERA_ERROR_TIMEOUT) ||
+ (err == CAMERA_ERROR_ESD)) {
+ /* Handling different error types is dependent on the requirement.
+ * Do the same action by default
+ */
+ obj->receive_camframe_error_timeout();
+ }
+ }
+}
+
+bool QualcommCameraHardware::storePreviewFrameForPostview(void) {
+ ALOGV("storePreviewFrameForPostview : E ");
+
+ /* Since there is restriction on the maximum overlay dimensions
+ * that can be created, we use the last preview frame as postview
+ * for 7x30. */
+ ALOGV("Copying the preview buffer to postview buffer %d ",
+ mPreviewFrameSize);
+ if(mLastPreviewFrameHeap == NULL) {
+ int CbCrOffset = PAD_TO_WORD(mPreviewFrameSize * 2/3);
+#if 0
+#ifdef USE_ION
+
+ mLastPreviewFrameHeap =
+ new IonPool(ION_HEAP_ADSP_ID,
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_PREVIEW, //MSM_PMEM_OUTPUT2,
+ mPreviewFrameSize,
+ 1,
+ mPreviewFrameSize,
+ CbCrOffset,
+ 0,
+ "postview");
+#else
+ mLastPreviewFrameHeap =
+ new PmemPool("/dev/pmem_adsp",
+ MemoryHeapBase::READ_ONLY | MemoryHeapBase::NO_CACHING,
+ MSM_PMEM_PREVIEW, //MSM_PMEM_OUTPUT2,
+ mPreviewFrameSize,
+ 1,
+ mPreviewFrameSize,
+ CbCrOffset,
+ 0,
+ "postview");
+#endif
+ if (!mLastPreviewFrameHeap->initialized()) {
+ mLastPreviewFrameHeap.clear();
+ ALOGE(" Failed to initialize Postview Heap");
+ return false;
+ }
+#endif
+ }
+#if 0
+ if( mLastPreviewFrameHeap != NULL && mLastQueuedFrame != NULL) {
+ memcpy(mLastPreviewFrameHeap->mHeap->base(),
+ (uint8_t *)mLastQueuedFrame, mPreviewFrameSize );
+
+ if(mUseOverlay && !mZslPanorama) {
+ //mOverlayLock.lock();
+ //if(mOverlay != NULL){
+ //mOverlay->setFd(mLastPreviewFrameHeap->mHeap->getHeapID());
+ if( zoomCropInfo.w !=0 && zoomCropInfo.h !=0) {
+ ALOGE("zoomCropInfo non-zero, setting crop ");
+ ALOGE("setCrop with %dx%d and %dx%d", zoomCropInfo.x, zoomCropInfo.y, zoomCropInfo.w, zoomCropInfo.h);
+ // mOverlay->setCrop(zoomCropInfo.x, zoomCropInfo.y,
+ //zoomCropInfo.w, zoomCropInfo.h);
+ }
+ ALOGV("Queueing Postview with last frame till the snapshot is done ");
+ //mOverlay->queueBuffer((void *)0);
+ }
+ //mOverlayLock.unlock();
+ }
+
+ } else
+ ALOGE("Failed to store Preview frame. No Postview ");
+#endif
+ ALOGV("storePreviewFrameForPostview : X ");
+ return true;
+}
+
+bool QualcommCameraHardware::isValidDimension(int width, int height) {
+ bool retVal = FALSE;
+ /* This function checks if a given resolution is valid or not.
+ * A particular resolution is considered valid if it satisfies
+ * the following conditions:
+ * 1. width & height should be multiple of 16.
+ * 2. width & height should be less than/equal to the dimensions
+ * supported by the camera sensor.
+ * 3. the aspect ratio is a valid aspect ratio and is among the
+ * commonly used aspect ratio as determined by the thumbnail_sizes
+ * data structure.
+ */
+
+ if( (width == CEILING16(width)) && (height == CEILING16(height))
+ && (width <= maxSnapshotWidth)
+ && (height <= maxSnapshotHeight) )
+ {
+ uint32_t pictureAspectRatio = (uint32_t)((width * Q12)/height);
+ for(uint32_t i = 0; i < THUMBNAIL_SIZE_COUNT; i++ ) {
+ if(thumbnail_sizes[i].aspect_ratio == pictureAspectRatio) {
+ retVal = TRUE;
+ break;
+ }
+ }
+ }
+ return retVal;
+}
+status_t QualcommCameraHardware::getBufferInfo(sp<IMemory>& Frame, size_t *alignedSize) {
+ status_t ret;
+ ALOGV(" getBufferInfo : E ");
+ if( ( mCurrentTarget == TARGET_MSM7630 ) || (mCurrentTarget == TARGET_QSD8250) || (mCurrentTarget == TARGET_MSM8660) )
+ {
+ if( mRecordHeap != NULL){
+ ALOGV(" Setting valid buffer information ");
+ Frame = mRecordHeap->mBuffers[0];
+ if( alignedSize != NULL) {
+ *alignedSize = mRecordHeap->mAlignedBufferSize;
+ ALOGV(" HAL : alignedSize = %d ", *alignedSize);
+ ret = NO_ERROR;
+ } else {
+ ALOGE(" HAL : alignedSize is NULL. Cannot update alignedSize ");
+ ret = UNKNOWN_ERROR;
+ }
+ } else {
+ ALOGE(" RecordHeap is null. Buffer information wont be updated ");
+ Frame = NULL;
+ ret = UNKNOWN_ERROR;
+ }
+ } else {
+ if(mPreviewHeap != NULL) {
+ ALOGV(" Setting valid buffer information ");
+ // Frame = mPreviewHeap->mBuffers[0];
+ if( alignedSize != NULL) {
+ //*alignedSize = mPreviewHeap->mAlignedBufferSize;
+ ALOGV(" HAL : alignedSize = %d ", *alignedSize);
+ ret = NO_ERROR;
+ } else {
+ ALOGE(" HAL : alignedSize is NULL. Cannot update alignedSize ");
+ ret = UNKNOWN_ERROR;
+ }
+ } else {
+ ALOGE(" PreviewHeap is null. Buffer information wont be updated ");
+ Frame = NULL;
+ ret = UNKNOWN_ERROR;
+ }
+ }
+ ALOGV(" getBufferInfo : X ");
+ return ret;
+}
+
+void QualcommCameraHardware::encodeData() {
+ ALOGV("encodeData: E");
+
+ if (mDataCallback && (mMsgEnabled & CAMERA_MSG_COMPRESSED_IMAGE)) {
+ mJpegThreadWaitLock.lock();
+ mJpegThreadRunning = true;
+ mJpegThreadWaitLock.unlock();
+ mm_camera_ops_type_t current_ops_type = CAMERA_OPS_ENCODE;
+ mCamOps.mm_camera_start(current_ops_type,(void *)&mImageCaptureParms,
+ (void *)&mImageEncodeParms);
+ //Wait until jpeg encoding is done and clear the resources.
+ mJpegThreadWaitLock.lock();
+ while (mJpegThreadRunning) {
+ ALOGV("encodeData: waiting for jpeg thread to complete.");
+ mJpegThreadWait.wait(mJpegThreadWaitLock);
+ ALOGV("encodeData: jpeg thread completed.");
+ }
+ mJpegThreadWaitLock.unlock();
+ }
+ else ALOGV("encodeData: JPEG callback is NULL, not encoding image.");
+
+ mCamOps.mm_camera_deinit(CAMERA_OPS_CAPTURE, NULL, NULL);
+ //clear the resources
+ deinitRaw();
+ //Encoding is done.
+ mEncodePendingWaitLock.lock();
+ mEncodePending = false;
+ mEncodePendingWait.signal();
+ mEncodePendingWaitLock.unlock();
+
+ ALOGV("encodeData: X");
+}
+
+void QualcommCameraHardware::getCameraInfo()
+{
+ ALOGI("getCameraInfo: IN");
+ mm_camera_status_t status;
+
+#if DLOPEN_LIBMMCAMERA
+ void *libhandle = ::dlopen("liboemcamera.so", RTLD_NOW);
+ ALOGI("getCameraInfo: loading libqcamera at %p", libhandle);
+ if (!libhandle) {
+ ALOGE("FATAL ERROR: could not dlopen liboemcamera.so: %s", dlerror());
+ }
+ *(void **)&LINK_mm_camera_get_camera_info =
+ ::dlsym(libhandle, "mm_camera_get_camera_info");
+#endif
+ storeTargetType();
+ status = LINK_mm_camera_get_camera_info(HAL_cameraInfo, &HAL_numOfCameras);
+ ALOGI("getCameraInfo: numOfCameras = %d", HAL_numOfCameras);
+ for(int i = 0; i < HAL_numOfCameras; i++) {
+ if((HAL_cameraInfo[i].position == BACK_CAMERA )&&
+ mCurrentTarget == TARGET_MSM8660){
+ HAL_cameraInfo[i].modes_supported |= CAMERA_ZSL_MODE;
+ } else{
+ HAL_cameraInfo[i].modes_supported |= CAMERA_NONZSL_MODE;
+ }
+ ALOGI("Camera sensor %d info:", i);
+ ALOGI("camera_id: %d", HAL_cameraInfo[i].camera_id);
+ ALOGI("modes_supported: %x", HAL_cameraInfo[i].modes_supported);
+ ALOGI("position: %d", HAL_cameraInfo[i].position);
+ ALOGI("sensor_mount_angle: %d", HAL_cameraInfo[i].sensor_mount_angle);
+ }
+
+#if DLOPEN_LIBMMCAMERA
+ if (libhandle) {
+ ::dlclose(libhandle);
+ ALOGV("getCameraInfo: dlclose(libqcamera)");
+ }
+#endif
+ ALOGI("getCameraInfo: OUT");
+}
+
+extern "C" int HAL_isIn3DMode()
+{
+ return HAL_currentCameraMode == CAMERA_MODE_3D;
+}
+
+extern "C" int HAL_getNumberOfCameras()
+{
+ QualcommCameraHardware::getCameraInfo();
+ return HAL_numOfCameras;
+}
+
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo)
+{
+ int i;
+ char mDeviceName[PROPERTY_VALUE_MAX];
+ if(cameraInfo == NULL) {
+ ALOGE("cameraInfo is NULL");
+ return;
+ }
+
+ property_get("ro.board.platform",mDeviceName," ");
+
+ for(i = 0; i < HAL_numOfCameras; i++) {
+ if(i == cameraId) {
+ ALOGI("Found a matching camera info for ID %d", cameraId);
+ cameraInfo->facing = (HAL_cameraInfo[i].position == BACK_CAMERA)?
+ CAMERA_FACING_BACK : CAMERA_FACING_FRONT;
+ // App Orientation not needed for 7x27 , sensor mount angle 0 is
+ // enough.
+ if(cameraInfo->facing == CAMERA_FACING_FRONT)
+ cameraInfo->orientation = HAL_cameraInfo[i].sensor_mount_angle;
+ else if( !strncmp(mDeviceName, "msm7625a", 8))
+ cameraInfo->orientation = HAL_cameraInfo[i].sensor_mount_angle;
+ else if( !strncmp(mDeviceName, "msm7627a", 8))
+ cameraInfo->orientation = HAL_cameraInfo[i].sensor_mount_angle;
+ else if( !strncmp(mDeviceName, "msm7627", 7))
+ cameraInfo->orientation = HAL_cameraInfo[i].sensor_mount_angle;
+ else if( !strncmp(mDeviceName, "msm8660", 7))
+ cameraInfo->orientation = HAL_cameraInfo[i].sensor_mount_angle;
+ else
+ cameraInfo->orientation = ((APP_ORIENTATION - HAL_cameraInfo[i].sensor_mount_angle) + 360)%360;
+
+ ALOGI("%s: orientation = %d", __FUNCTION__, cameraInfo->orientation);
+ sensor_rotation = HAL_cameraInfo[i].sensor_mount_angle;
+ cameraInfo->mode = 0;
+ if(HAL_cameraInfo[i].modes_supported & CAMERA_MODE_2D)
+ cameraInfo->mode |= CAMERA_SUPPORT_MODE_2D;
+ if(HAL_cameraInfo[i].modes_supported & CAMERA_MODE_3D)
+ cameraInfo->mode |= CAMERA_SUPPORT_MODE_3D;
+ if((HAL_cameraInfo[i].position == BACK_CAMERA )&&
+ !strncmp(mDeviceName, "msm8660", 7)){
+ cameraInfo->mode |= CAMERA_ZSL_MODE;
+ } else{
+ cameraInfo->mode |= CAMERA_NONZSL_MODE;
+ }
+
+ ALOGI("%s: modes supported = %d", __FUNCTION__, cameraInfo->mode);
+
+ return;
+ }
+ }
+// ALOGE("Unable to find matching camera info for ID %d", cameraId);
+}
+
+}; // namespace android
diff --git a/camera/QualcommCameraHardware.h b/camera/QualcommCameraHardware.h
new file mode 100644
index 0000000..61fd0c8
--- /dev/null
+++ b/camera/QualcommCameraHardware.h
@@ -0,0 +1,651 @@
+/*
+** Copyright 2008, Google Inc.
+** Copyright (c) 2009-2011, The Linux Foundation. All rights reserved.
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+** http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+#ifndef ANDROID_HARDWARE_QUALCOMM_CAMERA_HARDWARE_H
+#define ANDROID_HARDWARE_QUALCOMM_CAMERA_HARDWARE_H
+
+#define ICS
+
+//#include <camera/CameraHardwareInterface.h>
+#include <utils/threads.h>
+#include <binder/MemoryBase.h>
+#include <binder/MemoryHeapBase.h>
+#include <stdint.h>
+#include <ui/egl/android_natives.h>
+#ifdef ICS
+#include <hardware/camera.h>
+#endif
+#include <camera/Camera.h>
+#include "QCameraParameters.h"
+#include <system/window.h>
+#include <system/camera.h>
+#include <hardware/camera.h>
+#include <gralloc_priv.h>
+#include <QComOMXMetadata.h>
+#include "QCamera_Intf.h"
+
+extern "C" {
+#include <linux/android_pmem.h>
+#include <linux/ion.h>
+#include <mm_camera_interface.h>
+}
+
+struct str_map {
+ const char *const desc;
+ int val;
+};
+
+struct buffer_map {
+ msm_frame *frame;
+ buffer_handle_t * buffer;
+ int size;
+ int lockState;
+};
+
+typedef enum {
+ TARGET_MSM7625,
+ TARGET_MSM7625A,
+ TARGET_MSM7627,
+ TARGET_MSM7627A,
+ TARGET_QSD8250,
+ TARGET_MSM7630,
+ TARGET_MSM8660,
+ TARGET_MAX
+}targetType;
+
+typedef enum {
+ LIVESHOT_DONE,
+ LIVESHOT_IN_PROGRESS,
+ LIVESHOT_STOPPED
+}liveshotState;
+#define MIN_UNDEQUEUD_BUFFER_COUNT 2
+struct target_map {
+ const char *targetStr;
+ targetType targetEnum;
+};
+
+enum {
+ BUFFER_UNLOCKED,
+ BUFFER_LOCKED
+};
+
+struct board_property{
+ targetType target;
+ unsigned int previewSizeMask;
+ bool hasSceneDetect;
+ bool hasSelectableZoneAf;
+ bool hasFaceDetect;
+};
+
+namespace android {
+
+class QualcommCameraHardware : public RefBase{
+public:
+
+ //virtual sp<IMemoryHeap> getPreviewHeap() const;
+ //virtual sp<IMemoryHeap> getRawHeap() const;
+
+ void setCallbacks(camera_notify_callback notify_cb,
+ camera_data_callback data_cb,
+ camera_data_timestamp_callback data_cb_timestamp,
+ camera_request_memory get_memory,
+ void *user);
+
+ virtual void enableMsgType(int32_t msgType);
+ virtual void disableMsgType(int32_t msgType);
+ virtual bool msgTypeEnabled(int32_t msgType);
+
+ virtual status_t dump(int fd, const Vector<String16>& args) const;
+ virtual status_t startPreview();
+ virtual void stopPreview();
+ virtual bool previewEnabled();
+ virtual status_t startRecording();
+ virtual void stopRecording();
+ virtual bool recordingEnabled();
+ virtual void releaseRecordingFrame(const void *opaque);
+ virtual status_t autoFocus();
+ virtual status_t cancelAutoFocus();
+ virtual status_t takePicture();
+ virtual status_t takeLiveSnapshot();
+ virtual status_t takeLiveSnapshotInternal();
+ void set_liveshot_exifinfo();
+ virtual status_t cancelPicture();
+ virtual status_t setParameters(const QCameraParameters& params);
+ virtual QCameraParameters getParameters() const;
+ virtual status_t sendCommand(int32_t command, int32_t arg1, int32_t arg2);
+ virtual int32_t getNumberOfVideoBuffers();
+ virtual sp<IMemory> getVideoBuffer(int32_t index);
+ virtual status_t getBufferInfo( sp<IMemory>& Frame, size_t *alignedSize);
+ virtual void encodeData( );
+#ifdef ICS
+ virtual status_t set_PreviewWindow(void* param);
+ virtual status_t setPreviewWindow(preview_stream_ops_t* window);
+#endif
+ virtual status_t setPreviewWindow(const sp<ANativeWindow>& buf) {return NO_ERROR;};
+ virtual void release();
+
+ static QualcommCameraHardware* createInstance();
+ static QualcommCameraHardware* getInstance();
+
+ void receivePreviewFrame(struct msm_frame *frame);
+ void receiveLiveSnapshot(uint32_t jpeg_size);
+ void receiveCameraStats(camstats_type stype, camera_preview_histogram_info* histinfo);
+ void receiveRecordingFrame(struct msm_frame *frame);
+ void receiveJpegPicture(status_t status, mm_camera_buffer_t *encoded_buffer);
+ void jpeg_set_location();
+ void receiveJpegPictureFragment(uint8_t *buf, uint32_t size);
+ void notifyShutter(bool mPlayShutterSoundOnly);
+ void receive_camframe_error_timeout();
+ static void getCameraInfo();
+ void receiveRawPicture(status_t status,struct msm_frame *postviewframe, struct msm_frame *mainframe);
+ int allocate_ion_memory(int *main_ion_fd, struct ion_allocation_data* alloc,
+ struct ion_fd_data* ion_info_fd, int ion_type, int size, int *memfd);
+ int deallocate_ion_memory(int *main_ion_fd, struct ion_fd_data* ion_info_fd);
+ virtual ~QualcommCameraHardware();
+ int storeMetaDataInBuffers(int enable);
+
+private:
+ QualcommCameraHardware();
+ status_t startPreviewInternal();
+ status_t startRecordingInternal();
+ status_t setHistogramOn();
+ status_t setHistogramOff();
+ status_t runFaceDetection();
+ status_t setFaceDetection(const char *str);
+
+ void stopPreviewInternal();
+ friend void *auto_focus_thread(void *user);
+ void runAutoFocus();
+ status_t cancelAutoFocusInternal();
+ bool native_set_dimension (int camfd);
+ bool native_jpeg_encode (void);
+ bool updatePictureDimension(const QCameraParameters& params, int& width, int& height);
+ bool native_set_parms(camera_parm_type_t type, uint16_t length, void *value);
+ bool native_set_parms(camera_parm_type_t type, uint16_t length, void *value, int *result);
+ bool native_zoom_image(int fd, int srcOffset, int dstOffset, common_crop_t *crop);
+
+ status_t startInitialPreview();
+ void stopInitialPreview();
+ status_t getBuffersAndStartPreview();
+ void relinquishBuffers();
+
+ QualcommCameraHardware * singleton;
+
+ /* These constants reflect the number of buffers that libmmcamera requires
+ for preview and raw, and need to be updated when libmmcamera
+ changes.
+ */
+ static const int kPreviewBufferCount = NUM_PREVIEW_BUFFERS;
+ static const int kRawBufferCount = 1;
+ static const int kJpegBufferCount = 1;
+ static const int kTotalPreviewBufferCount = kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT;
+ int numCapture;
+ int numJpegReceived;
+ int jpegPadding;
+
+ QCameraParameters mParameters;
+ unsigned int frame_size;
+ bool mCameraRunning;
+ Mutex mCameraRunningLock;
+ bool mPreviewInitialized;
+
+
+ class MMCameraDL : public RefBase{
+ private:
+ static wp<MMCameraDL> instance;
+ MMCameraDL();
+ virtual ~MMCameraDL();
+ void *libmmcamera;
+ static Mutex singletonLock;
+ public:
+ static sp<MMCameraDL> getInstance();
+ void * pointer();
+ };
+
+ // This class represents a heap which maintains several contiguous
+ // buffers. The heap may be backed by pmem (when pmem_pool contains
+ // the name of a /dev/pmem* file), or by ashmem (when pmem_pool == NULL).
+ struct MemPool : public RefBase {
+ MemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name);
+
+ virtual ~MemPool() ;// = 0;
+
+ void completeInitialization();
+ bool initialized() const {
+ return mHeap != NULL && mHeap->base() != MAP_FAILED;
+ }
+
+ virtual status_t dump(int fd, const Vector<String16>& args) const;
+
+ int mBufferSize;
+ int mAlignedBufferSize;
+ int mNumBuffers;
+ int mFrameSize;
+ sp<MemoryHeapBase> mHeap;
+ sp<MemoryBase> *mBuffers;
+
+ const char *mName;
+ };
+ struct DispMemPool : public MemPool {
+ DispMemPool(int fd, int buffer_size,
+ int num_buffers, int frame_size,
+ const char *name);
+ virtual ~DispMemPool();
+ int mFD;
+ };
+ sp<DispMemPool> mPreviewHeap[kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT];
+
+ struct AshmemPool : public MemPool {
+ AshmemPool(int buffer_size, int num_buffers,
+ int frame_size,
+ const char *name);
+ };
+
+ struct PmemPool : public MemPool {
+ PmemPool(const char *pmem_pool,
+ int flags, int pmem_type,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yoffset, const char *name);
+ virtual ~PmemPool();
+ int mFd;
+ int mPmemType;
+ int mCbCrOffset;
+ int myOffset;
+ int mCameraControlFd;
+ uint32_t mAlignedSize;
+ struct pmem_region mSize;
+ sp<QualcommCameraHardware::MMCameraDL> mMMCameraDLRef;
+ };
+//TODO
+ struct IonPool : public MemPool {
+ IonPool( int ion_heap_id, int flags, int ion_type,
+ int buffer_size, int num_buffers,
+ int frame_size, int cbcr_offset,
+ int yoffset, const char *name);
+ virtual ~IonPool();
+ int mFd;
+ int mIonType;
+ int mCbCrOffset;
+ int myOffset;
+ int mCameraControlFd;
+ uint32_t mAlignedSize;
+ sp<QualcommCameraHardware::MMCameraDL> mMMCameraDLRef;
+ static const char mIonDevName[];
+ };
+#ifdef USE_ION
+// sp<IonPool> mPreviewHeap;
+ sp<IonPool> mYV12Heap;
+ sp<IonPool> mRecordHeap;
+ sp<IonPool> mThumbnailHeap;
+ sp<IonPool> mRawHeap;
+ sp<IonPool> mDisplayHeap;
+ sp<AshmemPool> mJpegHeap;
+ sp<AshmemPool> mStatHeap;
+ sp<AshmemPool> mMetaDataHeap;
+ sp<IonPool> mRawSnapShotPmemHeap;
+ sp<IonPool> mLastPreviewFrameHeap;
+ sp<IonPool> mPostviewHeap;
+#else
+// sp<PmemPool> mPreviewHeap;
+ sp<PmemPool> mYV12Heap;
+ sp<PmemPool> mRecordHeap;
+ sp<PmemPool> mThumbnailHeap;
+ sp<PmemPool> mRawHeap;
+ sp<PmemPool> mDisplayHeap;
+ sp<AshmemPool> mJpegHeap;
+ sp<AshmemPool> mStatHeap;
+ sp<AshmemPool> mMetaDataHeap;
+ sp<PmemPool> mRawSnapShotPmemHeap;
+ sp<PmemPool> mLastPreviewFrameHeap;
+ sp<PmemPool> mPostviewHeap;
+ sp<PmemPool> mPostViewHeap;
+ sp<PmemPool> mInitialPreviewHeap;
+#endif
+
+ sp<MMCameraDL> mMMCameraDLRef;
+
+ bool startCamera();
+ bool initPreview();
+ bool initRecord();
+ void deinitPreview();
+ bool initRaw(bool initJpegHeap);
+ bool initZslBuffers(bool initJpegHeap);
+ bool deinitZslBuffers();
+ bool initLiveSnapshot(int videowidth, int videoheight);
+ bool initRawSnapshot();
+ void deinitRaw();
+ void deinitRawSnapshot();
+ bool mPreviewThreadRunning;
+ bool createSnapshotMemory (int numberOfRawBuffers, int numberOfJpegBuffers,
+ bool initJpegHeap, int snapshotFormat = 1 /*PICTURE_FORMAT_JPEG*/);
+ Mutex mPreviewThreadWaitLock;
+ Condition mPreviewThreadWait;
+ friend void *preview_thread(void *user);
+ friend void *openCamera(void *data);
+ void runPreviewThread(void *data);
+ friend void *hfr_thread(void *user);
+ void runHFRThread(void *data);
+ bool mHFRThreadRunning;
+ int mapBuffer(msm_frame *frame);
+ int mapRawBuffer(msm_frame *frame);
+ int mapThumbnailBuffer(msm_frame *frame);
+ int mapJpegBuffer(mm_camera_buffer_t* buffer);
+ int mapvideoBuffer( msm_frame *frame);
+ int mapFrame(buffer_handle_t *buffer);
+ Mutex mHFRThreadWaitLock;
+
+ class FrameQueue : public RefBase{
+ private:
+ Mutex mQueueLock;
+ Condition mQueueWait;
+ bool mInitialized;
+
+ Vector<struct msm_frame *> mContainer;
+ public:
+ FrameQueue();
+ virtual ~FrameQueue();
+ bool add(struct msm_frame *element);
+ void flush();
+ struct msm_frame* get();
+ void init();
+ void deinit();
+ bool isInitialized();
+ };
+
+ FrameQueue mPreviewBusyQueue;
+
+ bool mFrameThreadRunning;
+ Mutex mFrameThreadWaitLock;
+ Condition mFrameThreadWait;
+ friend void *frame_thread(void *user);
+ void runFrameThread(void *data);
+
+ //720p recording video thread
+ bool mVideoThreadExit;
+ bool mVideoThreadRunning;
+ Mutex mVideoThreadWaitLock;
+ Condition mVideoThreadWait;
+ friend void *video_thread(void *user);
+ void runVideoThread(void *data);
+
+ // smooth zoom
+ int mTargetSmoothZoom;
+ bool mSmoothzoomThreadExit;
+ bool mSmoothzoomThreadRunning;
+ Mutex mSmoothzoomThreadWaitLock;
+ Mutex mSmoothzoomThreadLock;
+ Condition mSmoothzoomThreadWait;
+ friend void *smoothzoom_thread(void *user);
+ void runSmoothzoomThread(void* data);
+
+ // For Histogram
+ int mStatsOn;
+ int mCurrent;
+ bool mSendData;
+ Mutex mStatsWaitLock;
+ Condition mStatsWait;
+
+ //For Face Detection
+ int mFaceDetectOn;
+ bool mSendMetaData;
+ Mutex mMetaDataWaitLock;
+
+ bool mShutterPending;
+ Mutex mShutterLock;
+
+ bool mSnapshotThreadRunning;
+ Mutex mSnapshotThreadWaitLock;
+ Condition mSnapshotThreadWait;
+ friend void *snapshot_thread(void *user);
+ void runSnapshotThread(void *data);
+ Mutex mRawPictureHeapLock;
+ bool mJpegThreadRunning;
+ Mutex mJpegThreadWaitLock;
+ Condition mJpegThreadWait;
+ bool mInSnapshotMode;
+ Mutex mInSnapshotModeWaitLock;
+ Condition mInSnapshotModeWait;
+ bool mEncodePending;
+ Mutex mEncodePendingWaitLock;
+ Condition mEncodePendingWait;
+ bool mBuffersInitialized;
+
+ void debugShowPreviewFPS() const;
+ void debugShowVideoFPS() const;
+
+ int mSnapshotFormat;
+ bool mFirstFrame;
+ void hasAutoFocusSupport();
+ void filterPictureSizes();
+ void filterPreviewSizes();
+ static void storeTargetType();
+ bool supportsSceneDetection();
+ bool supportsSelectableZoneAf();
+ bool supportsFaceDetection();
+
+ void initDefaultParameters();
+ bool initImageEncodeParameters(int size);
+ bool initZslParameter(void);
+ status_t setCameraMode(const QCameraParameters& params);
+ status_t setPreviewSize(const QCameraParameters& params);
+ status_t setJpegThumbnailSize(const QCameraParameters& params);
+ status_t setPreviewFpsRange(const QCameraParameters& params);
+ status_t setPreviewFrameRate(const QCameraParameters& params);
+ status_t setPreviewFrameRateMode(const QCameraParameters& params);
+ status_t setRecordSize(const QCameraParameters& params);
+ status_t setPictureSize(const QCameraParameters& params);
+ status_t setJpegQuality(const QCameraParameters& params);
+ status_t setAntibanding(const QCameraParameters& params);
+ status_t setEffect(const QCameraParameters& params);
+ status_t setRecordingHint(const QCameraParameters& params);
+ status_t setExposureCompensation(const QCameraParameters ¶ms);
+ status_t setAutoExposure(const QCameraParameters& params);
+ status_t setWhiteBalance(const QCameraParameters& params);
+ status_t setFlash(const QCameraParameters& params);
+ status_t setGpsLocation(const QCameraParameters& params);
+ status_t setRotation(const QCameraParameters& params);
+ status_t setZoom(const QCameraParameters& params);
+ status_t setFocusMode(const QCameraParameters& params);
+ status_t setFocusAreas(const QCameraParameters& params);
+ status_t setMeteringAreas(const QCameraParameters& params);
+ status_t setBrightness(const QCameraParameters& params);
+ status_t setSkinToneEnhancement(const QCameraParameters& params);
+ status_t setOrientation(const QCameraParameters& params);
+ status_t setLensshadeValue(const QCameraParameters& params);
+ status_t setMCEValue(const QCameraParameters& params);
+ status_t setHDRImaging(const QCameraParameters& params);
+ status_t setExpBracketing(const QCameraParameters& params);
+ status_t setISOValue(const QCameraParameters& params);
+ status_t setPictureFormat(const QCameraParameters& params);
+ status_t setSharpness(const QCameraParameters& params);
+ status_t setContrast(const QCameraParameters& params);
+ status_t setSaturation(const QCameraParameters& params);
+ status_t setSceneMode(const QCameraParameters& params);
+ status_t setContinuousAf(const QCameraParameters& params);
+ status_t setTouchAfAec(const QCameraParameters& params);
+ status_t setSceneDetect(const QCameraParameters& params);
+ status_t setStrTextures(const QCameraParameters& params);
+ status_t setPreviewFormat(const QCameraParameters& params);
+ status_t setSelectableZoneAf(const QCameraParameters& params);
+ status_t setHighFrameRate(const QCameraParameters& params);
+ bool register_record_buffers(bool register_buffer);
+ status_t setRedeyeReduction(const QCameraParameters& params);
+ status_t setDenoise(const QCameraParameters& params);
+ status_t setZslParam(const QCameraParameters& params);
+ status_t setSnapshotCount(const QCameraParameters& params);
+ void setGpsParameters();
+ bool storePreviewFrameForPostview();
+ bool isValidDimension(int w, int h);
+ status_t updateFocusDistances(const char *focusmode);
+ int mStoreMetaDataInFrame;
+
+ Mutex mLock;
+ Mutex mDisplayLock;
+ Mutex mCamframeTimeoutLock;
+ bool camframe_timeout_flag;
+ bool mReleasedRecordingFrame;
+
+ Mutex mParametersLock;
+
+
+ Mutex mCallbackLock;
+ Mutex mOverlayLock;
+ Mutex mRecordLock;
+ Mutex mRecordFrameLock;
+ Condition mRecordWait;
+ Condition mStateWait;
+
+ /* mJpegSize keeps track of the size of the accumulated JPEG. We clear it
+ when we are about to take a picture, so at any time it contains either
+ zero, or the size of the last JPEG picture taken.
+ */
+ uint32_t mJpegSize;
+ unsigned int mPreviewFrameSize;
+ unsigned int mRecordFrameSize;
+ int mRawSize;
+ int mCbCrOffsetRaw;
+ int mYOffset;
+ int mJpegMaxSize;
+ int32_t mStatSize;
+
+
+ cam_ctrl_dimension_t mDimension;
+ bool mAutoFocusThreadRunning;
+ Mutex mAutoFocusThreadLock;
+
+ Mutex mAfLock;
+
+ pthread_t mFrameThread;
+ pthread_t mVideoThread;
+ pthread_t mPreviewThread;
+ pthread_t mSnapshotThread;
+ pthread_t mDeviceOpenThread;
+ pthread_t mSmoothzoomThread;
+ pthread_t mHFRThread;
+
+ common_crop_t mCrop;
+
+ bool mInitialized;
+
+ int mBrightness;
+ int mSkinToneEnhancement;
+ int mHJR;
+ unsigned int mThumbnailMapped[MAX_SNAPSHOT_BUFFERS];
+ unsigned int mThumbnailLockState[MAX_SNAPSHOT_BUFFERS];
+ int mRawfd[MAX_SNAPSHOT_BUFFERS];
+ int mRawSnapshotfd;
+ int mJpegfd[MAX_SNAPSHOT_BUFFERS];
+ int mRecordfd[9];
+ camera_memory_t *mPreviewMapped[kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT];
+ camera_memory_t *mRawMapped[MAX_SNAPSHOT_BUFFERS];
+ camera_memory_t *mJpegMapped[MAX_SNAPSHOT_BUFFERS];
+ camera_memory_t *mRawSnapshotMapped;
+ camera_memory_t *mStatsMapped[3];
+ camera_memory_t *mRecordMapped[9];
+ camera_memory_t *mJpegCopyMapped;
+ camera_memory_t* metadata_memory[9];
+ camera_memory_t *mJpegLiveSnapMapped;
+ int raw_main_ion_fd[MAX_SNAPSHOT_BUFFERS];
+ int raw_snapshot_main_ion_fd;
+ int Jpeg_main_ion_fd[MAX_SNAPSHOT_BUFFERS];
+ int record_main_ion_fd[9];
+ struct ion_allocation_data raw_alloc[MAX_SNAPSHOT_BUFFERS];
+ struct ion_allocation_data raw_snapshot_alloc;
+ struct ion_allocation_data Jpeg_alloc[MAX_SNAPSHOT_BUFFERS];
+ struct ion_allocation_data record_alloc[9];
+ struct ion_fd_data raw_ion_info_fd[MAX_SNAPSHOT_BUFFERS];
+ struct ion_fd_data raw_snapshot_ion_info_fd;
+ struct ion_fd_data Jpeg_ion_info_fd[MAX_SNAPSHOT_BUFFERS];
+ struct ion_fd_data record_ion_info_fd[9];
+
+ struct msm_frame frames[kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT];
+ struct buffer_map frame_buffer[kPreviewBufferCount + MIN_UNDEQUEUD_BUFFER_COUNT];
+ struct msm_frame *recordframes;
+ struct msm_frame *rawframes;
+ bool *record_buffers_tracking_flag;
+ bool mInPreviewCallback;
+ preview_stream_ops_t* mPreviewWindow;
+ android_native_buffer_t *mPostViewBuffer;
+ buffer_handle_t *mThumbnailBuffer[MAX_SNAPSHOT_BUFFERS];
+ bool mIs3DModeOn;
+
+ int32_t mMsgEnabled; // camera msg to be handled
+ camera_notify_callback mNotifyCallback;
+ camera_data_callback mDataCallback;
+ camera_data_timestamp_callback mDataCallbackTimestamp;
+ camera_request_memory mGetMemory;
+ void *mCallbackCookie; // same for all callbacks
+ int mDebugFps;
+ int kPreviewBufferCountActual;
+ int previewWidth, previewHeight;
+ int yv12framesize;
+ bool mSnapshotDone;
+ int maxSnapshotWidth;
+ int maxSnapshotHeight;
+ bool mHasAutoFocusSupport;
+ int videoWidth, videoHeight;
+
+ bool mDisEnabled;
+ int mRotation;
+ bool mResetWindowCrop;
+ int mThumbnailWidth, mThumbnailHeight;
+ status_t setVpeParameters();
+ status_t setDIS();
+ bool strTexturesOn;
+ int mPictureWidth;
+ int mPictureHeight;
+ int mPostviewWidth;
+ int mPostviewHeight;
+ int mTotalPreviewBufferCount;
+ int mDenoiseValue;
+ int mZslEnable;
+ int mZslPanorama;
+ bool mZslFlashEnable;
+ cam_3d_frame_format_t mSnapshot3DFormat;
+ bool mSnapshotCancel;
+ bool mHFRMode;
+ Mutex mSnapshotCancelLock;
+ int mActualPictWidth;
+ int mActualPictHeight;
+ bool mUseJpegDownScaling;
+ bool mPreviewStopping;
+ bool mInHFRThread;
+ Mutex mPmemWaitLock;
+ Condition mPmemWait;
+ bool mPrevHeapDeallocRunning;
+ bool mHdrMode;
+ bool mExpBracketMode;
+
+ bool mMultiTouch;
+
+ int mRecordingState;
+
+ int mNumFDRcvd;
+ int mFacesDetected;
+ int mFaceArray[MAX_ROI * 4 + 1];
+
+};
+
+extern "C" int HAL_getNumberOfCameras();
+extern "C" void HAL_getCameraInfo(int cameraId, struct CameraInfo* cameraInfo);
+extern "C" QualcommCameraHardware* HAL_openCameraHardware(int cameraId);
+}; // namespace android
+
+#endif
diff --git a/camera/configure.ac b/camera/configure.ac
new file mode 100644
index 0000000..790d7e0
--- /dev/null
+++ b/camera/configure.ac
@@ -0,0 +1,81 @@
+AC_PREREQ(2.61)
+
+AC_INIT([camera-hal],1.0.0)
+
+AM_INIT_AUTOMAKE([-Werror -Wall gnu foreign])
+
+AM_MAINTAINER_MODE
+
+AC_CONFIG_HEADER([configure.h])
+AC_CONFIG_MACRO_DIR([m4])
+
+# Checks for programs.
+AM_PROG_AS
+AC_PROG_CC
+AC_PROG_CXX
+AM_PROG_CC_C_O
+AC_PROG_LIBTOOL
+AC_PROG_AWK
+AC_PROG_CPP
+AC_PROG_INSTALL
+AC_PROG_LN_S
+AC_PROG_MAKE_SET
+
+# Checks for typedefs, structures, and compiler characteristics.
+AC_TYPE_SIZE_T
+AC_TYPE_SSIZE_T
+AC_TYPE_UINT16_T
+AC_TYPE_UINT32_T
+AC_TYPE_UINT64_T
+AC_TYPE_UINT8_T
+
+AC_CHECK_TOOL(OBJCOPY, objcopy, false)
+
+COMPILE_CAMERA=yes
+AMSS_VERSION=
+VFE_VERS=
+MSM_VERSION=
+DEBUG_CPPFLAGS=
+DEBUG_CFLAGS=
+DEBUG_LDFLAGS=
+
+AC_ARG_ENABLE([target],
+ [AS_HELP_STRING([--enable-target=TARGET],[target to build for])],
+ [],
+ [enable_target=none]
+)
+
+if test "x$enable_target" = "xmsm7627a"; then
+ MSM_VERSION=7x27A
+ BUILD_UNIFIED_CODE=false
+ BUILD_JPEG=false
+elif test "x$enable_target" = "xmsm8960"; then
+ MSM_VERSION=8960
+ BUILD_UNIFIED_CODE=false
+ BUILD_JPEG=true
+else
+ MSM_VERSION=
+ BUILD_UNIFIED_CODE=false
+ BUILD_JPEG=false
+fi
+
+AC_ARG_ENABLE([debug],
+ [AS_HELP_STRING([--enable-debug],[Build with debug flags and options])],
+ [DEBUG=$enableval],
+ [DEBUG=no]
+)
+if test "x$DEBUG" = "xyes"; then
+ DEBUG_CPPFLAGS="${DEBUG_CPPFLAGS} -DLOG_DEBUG -DLOG_TAG=\"CameraService\""
+ DEBUG_CFLAGS="${DEBUG_CFLAGS} -DLOG_DEBUG -DLOG_TAG=\"CameraService\" -g -O0"
+fi
+
+AC_SUBST([MSM_VERSION])
+AM_CONDITIONAL([MSM7X27A], [test "x$MSM_VERSION" = "x7x27A"])
+AM_CONDITIONAL([MSM8960], [test "x$MSM_VERSION" = "x8960"])
+AM_CONDITIONAL([BUILD_UNIFIED_CODE], [test "x$BUILD_UNIFIED_CODE" = "xtrue"])
+AM_CONDITIONAL([BUILD_JPEG], [test "x$BUILD_JPEG" = "xtrue"])
+
+AC_OUTPUT([ \
+ Makefile
+])
+
diff --git a/camera/hdr/include/morpho_api.h b/camera/hdr/include/morpho_api.h
new file mode 100644
index 0000000..565e29f
--- /dev/null
+++ b/camera/hdr/include/morpho_api.h
@@ -0,0 +1,23 @@
+/**
+ * @file morpho_api.h
+ * @brief Macro definitions for declaring API functions
+ * @version 1.0.0
+ * @date Tue Sep 21 17:37:35 2010
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_API_H
+#define MORPHO_API_H
+
+/**
+ * Used when declaring API functions.
+ * On Windows this can be switched to export symbols when building a DLL.
+ */
+#if defined(MORPHO_DLL) && defined(_WIN32)
+#define MORPHO_API(type) __declspec(dllexport) extern type
+#else
+#define MORPHO_API(type) extern type
+#endif
+
+#endif /* #ifndef MORPHO_API_H */
diff --git a/camera/hdr/include/morpho_easy_hdr.h b/camera/hdr/include/morpho_easy_hdr.h
new file mode 100644
index 0000000..a941762
--- /dev/null
+++ b/camera/hdr/include/morpho_easy_hdr.h
@@ -0,0 +1,769 @@
+/*******************************************************************
+ * morpho_easy_hdr.h
+ * [CP932/CRLF] { »û®©®»èp }
+ *------------------------------------------------------------------
+ * Copyright (C) 2010-2012 Morpho,Inc.
+ *******************************************************************/
+
+#ifndef MORPHO_EASY_HDR_H
+#define MORPHO_EASY_HDR_H
+
+/*******************************************************************/
+
+#include "morpho_api.h"
+#include "morpho_error.h"
+#include "morpho_image_data.h"
+#include "morpho_rect_int.h"
+
+/*******************************************************************/
+
+#define MORPHO_EASY_HDR_VER "Morpho EasyHDR Ver.2.0.1 2012/07/18"
+
+/*-----------------------------------------------------------------*/
+
+/* (input-limitaion) */
+
+#define MORPHO_EASY_HDR_MIN_IMAGE_WIDTH 100
+#define MORPHO_EASY_HDR_MAX_IMAGE_WIDTH 8192
+#define MORPHO_EASY_HDR_MIN_IMAGE_HEIGHT 100
+#define MORPHO_EASY_HDR_MAX_IMAGE_HEIGHT 8192
+#define MORPHO_EASY_HDR_MIN_NIMAGES 2
+#define MORPHO_EASY_HDR_MAX_NIMAGES 10
+
+/*-----------------------------------------------------------------*/
+
+/* (parameter) */
+
+#define MORPHO_EASY_HDR_DISABLED 0
+#define MORPHO_EASY_HDR_ENABLED 1
+
+#define MORPHO_EASY_HDR_IMAGE_ALIGNMENT_DEFAULT MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_GHOST_REMOVAL_DEFAULT MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_AUTO_SCALING_DEFAULT MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_FACE_DETECTION_DEFAULT MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_FAIL_SOFT_MERGING_DEFAULT MORPHO_EASY_HDR_ENABLED
+
+#define MORPHO_EASY_HDR_GHOST_DETECTION_SENSITIVITY_LEVEL_MIN 0
+#define MORPHO_EASY_HDR_GHOST_DETECTION_SENSITIVITY_LEVEL_MAX 10
+#define MORPHO_EASY_HDR_GHOST_DETECTION_SENSITIVITY_LEVEL_DEFAULT 7
+
+#define MORPHO_EASY_HDR_MERGE_SMOOTHNESS_LEVEL_MIN 0
+#define MORPHO_EASY_HDR_MERGE_SMOOTHNESS_LEVEL_MAX 10
+#define MORPHO_EASY_HDR_MERGE_SMOOTHNESS_LEVEL_DEFAULT 6
+
+#define MORPHO_EASY_HDR_MERGE_PARAM_MIN 0
+#define MORPHO_EASY_HDR_MERGE_PARAM_MAX 255
+#define MORPHO_EASY_HDR_MERGE_PARAM1_DEFAULT 0
+#define MORPHO_EASY_HDR_MERGE_PARAM2_DEFAULT 128
+#define MORPHO_EASY_HDR_MERGE_PARAM3_DEFAULT 0
+#define MORPHO_EASY_HDR_MERGE_PARAM4_DEFAULT 255
+
+#define MORPHO_EASY_HDR_RELIABLE_RECT_RATE_THRESHOLD_MIN 0
+#define MORPHO_EASY_HDR_RELIABLE_RECT_RATE_THRESHOLD_MAX 100
+#define MORPHO_EASY_HDR_RELIABLE_RECT_RATE_THRESHOLD_DEFAULT 80
+
+#define MORPHO_EASY_HDR_GHOST_RATE_THRESHOLD_MIN 0
+#define MORPHO_EASY_HDR_GHOST_RATE_THRESHOLD_MAX 100
+#define MORPHO_EASY_HDR_GHOST_RATE_THRESHOLD_DEFAULT 90
+
+#define MORPHO_EASY_HDR_CC_OFFSET_MIN 0
+#define MORPHO_EASY_HDR_CC_OFFSET_MAX 255
+#define MORPHO_EASY_HDR_CC_Y_OFFSET_DEFAULT 0
+#define MORPHO_EASY_HDR_CC_C_OFFSET_DEFAULT 0
+
+#define MORPHO_EASY_HDR_CC_GAIN_MIN 100
+#define MORPHO_EASY_HDR_CC_GAIN_MAX 2000
+#define MORPHO_EASY_HDR_CC_Y_GAIN_DEFAULT 1000
+#define MORPHO_EASY_HDR_CC_C_GAIN_DEFAULT 1000
+
+#define MORPHO_EASY_HDR_CC_GAMMA_MIN 100
+#define MORPHO_EASY_HDR_CC_GAMMA_MAX 2000
+#define MORPHO_EASY_HDR_CC_Y_GAMMA_DEFAULT 1000
+#define MORPHO_EASY_HDR_CC_C_GAMMA_DEFAULT 1000
+
+/*-----------------------------------------------------------------*/
+
+/* (merge-status) */
+
+#define MORPHO_EASY_HDR_OK 0x00000000
+#define MORPHO_EASY_HDR_ERROR_IMAGE_ALIGNMENT_FAILURE 0x00000001
+#define MORPHO_EASY_HDR_ERROR_EXP_ESTIMATION_FAILURE 0x00000002
+#define MORPHO_EASY_HDR_ERROR_MOSTLY_GHOST 0x00000004
+#define MORPHO_EASY_HDR_ERROR_INTERNAL 0x80000000
+
+/*******************************************************************/
+
+typedef struct _morpho_EasyHDR morpho_EasyHDR;
+typedef struct _morpho_EasyHDR_Callback morpho_EasyHDR_Callback;
+
+/*-----------------------------------------------------------------*/
+
+/** EasyHDR */
+struct _morpho_EasyHDR
+{
+ void *p; /**< à\¢ÌÖÌ|C^ */
+};
+
+/** EasyHDR Callback (for multi-thread processing) */
+struct _morpho_EasyHDR_Callback
+{
+ void *p; /**< R[obNÖÌæêøƵÄn³êél */
+
+ void * (* thread_create )(void *p, int index, void *(*start_routine)(void *arg), void *arg);
+ int (* thread_destroy)(void *p, void *thread);
+ int (* thread_join )(void *p, void *thread, void **value_ptr);
+
+ void * (* mutex_create )(void *p);
+ int (* mutex_destroy)(void *p, void *mutex);
+ int (* mutex_lock )(void *p, void *mutex);
+ int (* mutex_trylock)(void *p, void *mutex);
+ int (* mutex_unlock )(void *p, void *mutex);
+
+ void * (* cond_create )(void *p);
+ int (* cond_destroy )(void *p, void *cond);
+ int (* cond_wait )(void *p, void *cond, void *lock);
+ int (* cond_signal )(void *p, void *cond);
+ int (* cond_broadcast)(void *p, void *cond);
+};
+
+/*******************************************************************/
+
+#ifdef __cplusplus
+extern "C"
+{
+#endif
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * o[W¶ñðæ¾
+ *
+ * @return o[W¶ñ(MORPHO_EASY_HDR_VER)
+ */
+MORPHO_API(char const *)
+morpho_EasyHDR_getVersion(void);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * KvÈTCYðæ¾
+ *
+ * @param[in] max_width üÍæÌÅå
+ * @param[in] max_height üÍæÌÅå³
+ * @param[in] format ætH[}bg¶ñ
+ *
+ * @return KvÈTCY(byte)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getBufferSize(
+ int max_width,
+ int max_height,
+ char const *format);
+
+/**
+ * ú»
+ *
+ * gpXbhÉ0ȺÌlðÝèµ½êA
+ * ªÀsðs¤B
+ *
+ * gpXbhÉ1ÈãÌlðÝèµ½êA
+ * êÀsðs¤B
+ *
+ * gpXbhÉ2ÈãÌlðÝèµ½êA
+ * }`XbhÉæéÀñÀs(êÀs)ðs¤B
+ * callback ÉKØÈlðÝè·éKv èB
+ *
+ * yÀsóÔÌJÚz
+ * ?_UNKNOWN ¨ 0_INITIALIZED
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] buffer EasyHDRÉèÄéÖÌ|C^
+ * @param[in] buffer_size EasyHDRÉèÄéÌTCY
+ * @param[in] nthreads gpXbh (RA)
+ * @param[in] callback R[obNÖQ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_initialize(
+ morpho_EasyHDR *p,
+ void *buffer,
+ int buffer_size,
+ int nthreads,
+ morpho_EasyHDR_Callback const *callback);
+
+/**
+ * N[Abv
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_finalize(
+ morpho_EasyHDR *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * ¬ÌJnEÀs
+ * setImageFormat() ÀsãÉÀsÂ\
+ *
+ * yÀsóÔÌJÚ (êÀs)z
+ * 0_INITIALIZED ¨ (1_PROCESSING) ¨ 0_INITIALIZED (®¹)
+ * ¨ 2_SUSPENDED (suspend()ÄÑoµ)
+ *
+ * yÀsóÔÌJÚ (ªÀs)z
+ * 0_INITIALIZED ¨ 3_PAUSED ()
+ * ¨ 0_INITIALIZED (®¹)
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] output_image Êæ (u1ÚvÌüÍæðwèÂ\)
+ * @param[in,out] input_images üÍæQ (GWÉæÁÄ«·¦çêé)
+ * @param[in] nimages üÍæÌ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_merge(
+ morpho_EasyHDR *p,
+ morpho_ImageData *output_image,
+ morpho_ImageData *input_images[],
+ int nimages);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * ¬Ìp±Às
+ *
+ * merge() ÀsãÉÀsÂ\
+ *
+ * ªÀs(initialize() Å nthreads É 0 ðw赽ƫ)ÌÝLø
+ *
+ * yÀsóÔÌJÚ (ªÀs)z
+ * 3_PAUSED ¨ 3_PAUSED ()
+ * ¨ 0_INITIALIZED (®¹)
+ *
+ * @param[in,out] p EasyHDR CX^X
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_process(
+ morpho_EasyHDR *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * ¬Ìf (ÊReLXg©çÌÄÑoµÉæé)
+ * merge() ÀsÉÀsÂ\
+ *
+ * yÀsóÔÌJÚ (êÀs)z
+ * 1_PROCESSING ¨ 2_SUSPENDED
+ *
+ * @param[in,out] p EasyHDR CX^X
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_suspend(
+ morpho_EasyHDR *p);
+
+/**
+ * ¬ÌÄJ
+ * suspend() ÀsãÉÀsÂ\
+ *
+ * yÀsóÔÌJÚ (êÀs)z
+ * 2_SUSPENDED ¨ (1_PROCESSING) ¨ 0_INITIALIZED (®¹)
+ * ¨ 2_SUSPENDED (suspend()ÄÑoµ)
+ *
+ * @param[in,out] p EasyHDR CX^X
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_resume(
+ morpho_EasyHDR *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * ætH[}bgÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] format ætH[}bgð çí·¶ñ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setImageFormat(
+ morpho_EasyHDR *p,
+ char const *format);
+
+/**
+ * ætH[}bgÌæ¾
+ * setImageFormat() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] buffer ætH[}bgð çí·¶ñªi[³êéobt@
+ * @param[in] buffer_size obt@ÌTCY(I[¶ÜÞ)
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getImageFormat(
+ morpho_EasyHDR *p,
+ char *buffer,
+ int buffer_size);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Êuí¹(èÔêâ³)ÌL³ÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * value:
+ * MOR_EASY_HDR_ENABLED : Êuí¹ è
+ * MOR_EASY_HDR_DISABLED : Êuí¹Èµ
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] value Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setImageAlignmentStatus(
+ morpho_EasyHDR *p,
+ int value);
+
+/**
+ * Êuí¹(èÔêâ³)ÌL³Ìæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] value Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getImageAlignmentStatus(
+ morpho_EasyHDR *p,
+ int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * S[Xg(íÊÌÔêâ³)ÌL³ÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * value:
+ * MOR_EASY_HDR_ENABLED : S[Xg è
+ * MOR_EASY_HDR_DISABLED : S[Xgȵ
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] value Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setGhostRemovalStatus(
+ morpho_EasyHDR *p,
+ int value);
+
+/**
+ * S[Xg(íÊÌÔêâ³)ÌL³Ìæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] value Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getGhostRemovalStatus(
+ morpho_EasyHDR *p,
+ int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * ©®gå(NbsO)ÌL³ÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * value:
+ * MOR_EASY_HDR_ENABLED : ©®gå è
+ * MOR_EASY_HDR_DISABLED : ©®gåȵ
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] value Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setAutoScalingStatus(
+ morpho_EasyHDR *p,
+ int value);
+
+/**
+ * ©®gå(NbsO)ÌL³Ìæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] value Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getAutoScalingStatus(
+ morpho_EasyHDR *p,
+ int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * çoâ³ÌL³ÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * value:
+ * MOR_EASY_HDR_ENABLED : çoâ³ è
+ * MOR_EASY_HDR_DISABLED : çoâ³Èµ
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] value Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setFaceDetectionStatus(
+ morpho_EasyHDR *p,
+ int value);
+
+/**
+ * çoâ³ÌL³Ìæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] value Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getFaceDetectionStatus(
+ morpho_EasyHDR *p,
+ int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Fail-soft-merging ÌL³ÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * value:
+ * MOR_EASY_HDR_ENABLED : Fail-soft-merging è
+ * MOR_EASY_HDR_DISABLED : Fail-soft-merging ȵ
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] value Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setFailSoftMergingStatus(
+ morpho_EasyHDR *p,
+ int value);
+
+/**
+ * Fail-soft-merging ÌL³Ìæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] value Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getFailSoftMergingStatus(
+ morpho_EasyHDR *p,
+ int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * S[Xg»è´xxÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] value Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setGhostDetectionSensitivityLevel(
+ morpho_EasyHDR *p,
+ int value);
+
+/**
+ * S[Xg»è´xxÌæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] value Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getGhostDetectionSensitivityLevel(
+ morpho_EasyHDR *p,
+ int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * ¬Èß穳ÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] value Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setMergeSmoothnessLevel(
+ morpho_EasyHDR *p,
+ int value);
+
+/**
+ * ¬Èß穳Ìæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] value Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getMergeSmoothnessLevel(
+ morpho_EasyHDR *p,
+ int *value);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * ¬p[^ÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] value1 Ýèl
+ * @param[in] value2 Ýèl
+ * @param[in] value3 Ýèl
+ * @param[in] value4 Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setMergeParameters(
+ morpho_EasyHDR *p,
+ int value1,
+ int value2,
+ int value3,
+ int value4);
+
+/**
+ * ¬p[^Ìæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] value1 Ýèli[æ
+ * @param[out] value2 Ýèli[æ
+ * @param[out] value3 Ýèli[æ
+ * @param[out] value4 Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getMergeParameters(
+ morpho_EasyHDR *p,
+ int *value1,
+ int *value2,
+ int *value3,
+ int *value4);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * LøÌæèlÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] rate Ýèl ( rate % é`)
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setReliableRectRateThreshold(
+ morpho_EasyHDR *p,
+ int rate);
+
+/**
+ * LøÌæèlÌæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] rate Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getReliableRectRateThreshold(
+ morpho_EasyHDR *p,
+ int *rate);
+
+/**
+ * LøÌæÌæ¾
+ * initialize() ÀsãÉÀsÂ\
+ * (LøÈlªZbg³êéÌÍ merge() ã)
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] rect Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getReliableRect(
+ morpho_EasyHDR *p,
+ morpho_RectInt *rect);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * S[XgèlÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] rate Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setGhostRateThreshold(
+ morpho_EasyHDR *p,
+ int rate);
+
+/**
+ * S[XgèlÌæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] rate Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getGhostRateThreshold(
+ morpho_EasyHDR *p,
+ int *rate);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * Fâ³p[^ÌÝè
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[in] y_offset Ýèl
+ * @param[in] y_gain Ýèl
+ * @param[in] y_gamma Ýèl
+ * @param[in] c_offset Ýèl
+ * @param[in] c_gain Ýèl
+ * @param[in] c_gamma Ýèl
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_setColorCorrectionParameters(
+ morpho_EasyHDR *p,
+ int y_offset,
+ int y_gain,
+ int y_gamma,
+ int c_offset,
+ int c_gain,
+ int c_gamma);
+
+/**
+ * Fâ³p[^Ìæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] y_offset Ýèli[æ
+ * @param[out] y_gain Ýèli[æ
+ * @param[out] y_gamma Ýèli[æ
+ * @param[out] c_offset Ýèli[æ
+ * @param[out] c_gain Ýèli[æ
+ * @param[out] c_gamma Ýèli[æ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getColorCorrectionParameters(
+ morpho_EasyHDR *p,
+ int *y_offset,
+ int *y_gain,
+ int *y_gamma,
+ int *c_offset,
+ int *c_gain,
+ int *c_gamma);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * ¬Xe[^XÌæ¾
+ * initialize() ÀsãÉÀsÂ\
+ *
+ * Xe[^XR[h
+ * MORPHO_EASY_HDR_OK
+ * MORPHO_EASY_HDR_ERROR_*
+ *
+ * @param[in,out] p EasyHDR CX^X
+ *
+ * @return Status code (MORPHO_EASY_HDR_OK or MORPHO_EASY_HDR_ERROR_*)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_getMergeStatus(
+ morpho_EasyHDR *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * TlCÌì¬ (oÍæÌk¬)
+ * morpho_EasyHDR_setImageFormat() ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p EasyHDR CX^X
+ * @param[out] thumbnail_image oÍæ
+ * @param[in] output_image üÍæ
+ *
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_EasyHDR_makeThumbnail(
+ morpho_EasyHDR *p,
+ morpho_ImageData *thumbnail_image,
+ morpho_ImageData const *output_image);
+
+/*-----------------------------------------------------------------*/
+
+#ifdef __cplusplus
+} /* extern "C" */
+#endif
+
+/*******************************************************************/
+
+#endif /* !MORPHO_EASY_HDR_H */
+
+/*******************************************************************/
+/* [EOF] */
diff --git a/camera/hdr/include/morpho_easy_hdr_ext.h b/camera/hdr/include/morpho_easy_hdr_ext.h
new file mode 100644
index 0000000..2a4bd7c
--- /dev/null
+++ b/camera/hdr/include/morpho_easy_hdr_ext.h
@@ -0,0 +1,19 @@
+#ifndef MORPHO_EASY_HDR_EXT_H
+#define MORPHO_EASY_HDR_EXT_H
+
+#include "morpho_easy_hdr.h"
+/*
+return == 0 : OK
+return != 0 : NG (Please print the return value to check Error types)
+*/
+MORPHO_API(int)
+LINK_mm_camera_HDR(
+ unsigned char* yuvInput01,
+ unsigned char* yuvInput02,
+ unsigned char* yuvInput03,
+ unsigned char* pHDROutImage,
+ int width,
+ int height,
+ int indoor);
+
+#endif //MORPHO_EASY_HDR_EXT_H
diff --git a/camera/hdr/include/morpho_error.h b/camera/hdr/include/morpho_error.h
new file mode 100644
index 0000000..148216f
--- /dev/null
+++ b/camera/hdr/include/morpho_error.h
@@ -0,0 +1,29 @@
+/**
+ * @file morpho_error.h
+ * @brief Error code definitions
+ * @version 1.0.0
+ * @date 2008-06-09
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_ERROR_H
+#define MORPHO_ERROR_H
+
+/** G[R[h .*/
+#define MORPHO_OK (0x00000000) /**< ¬÷ */
+#define MORPHO_DOPROCESS (0x00000001) /**< */
+#define MORPHO_CANCELED (0x00000002) /**< LZ³ê½ */
+#define MORPHO_SUSPENDED (0x00000008) /**< f³ê½ */
+
+#define MORPHO_ERROR_GENERAL_ERROR (0x80000000) /**< êÊIÈG[. */
+#define MORPHO_ERROR_PARAM (0x80000001) /**< øªs³. */
+#define MORPHO_ERROR_STATE (0x80000002) /**< àóÔâÖÄoªs³. */
+#define MORPHO_ERROR_MALLOC (0x80000004) /**< AP[VG[. */
+#define MORPHO_ERROR_IO (0x80000008) /**< üoÍG[. */
+#define MORPHO_ERROR_UNSUPPORTED (0x80000010) /**< @\ðT|[gµÄ¢È¢. */
+#define MORPHO_ERROR_NOTFOUND (0x80000020) /**< õÎÛª©Â©çÈ¢ */
+#define MORPHO_ERROR_INTERNAL (0x80000040) /**< àG[. */
+#define MORPHO_ERROR_UNKNOWN (0xC0000000) /**< ãLÈOÌG[. */
+
+#endif /* #ifndef MORPHO_ERROR_H */
diff --git a/camera/hdr/include/morpho_get_image_size.h b/camera/hdr/include/morpho_get_image_size.h
new file mode 100644
index 0000000..b0f538f
--- /dev/null
+++ b/camera/hdr/include/morpho_get_image_size.h
@@ -0,0 +1,89 @@
+/**
+ * @file morpho_get_image_size.h
+ * @brief Functions for obtaining the buffer size required for an image
+ * @version 1.0.0
+ * @date 2008-07-01
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_GET_IMAGE_SIZE_H
+#define MORPHO_GET_IMAGE_SIZE_H
+
+#include "morpho_api.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * ƳÆtH[}bg¼©çAæði[·éÌÉKvÈTCYð¾é.
+ *
+ * @param width
+ * @param height ³
+ * @param p_format ætH[}bg¶ñ
+ * @return æÉKvÈTCY
+ */
+#define morpho_getImageSize mor_noise_reduction_IF_getImageSize
+
+MORPHO_API(int)
+morpho_getImageSize(int width, int height, const char *p_format);
+
+/**
+ * Yæf[^TCYðæ¾.
+ *
+ * @param width
+ * @param height ³
+ * @param p_format ætH[}bg¶ñ
+ * @return Yæf[^TCY
+ */
+#define morpho_getImageSizeY mor_noise_reduction_IF_getImageSizeY
+
+MORPHO_API(int)
+morpho_getImageSizeY(int width, int height, const char *p_format);
+
+/**
+ * Uæf[^TCYðæ¾.
+ *
+ * @param width
+ * @param height ³
+ * @param p_format ætH[}bg¶ñ
+ * @return Uæf[^TCY
+ */
+#define morpho_getImageSizeU mor_noise_reduction_IF_getImageSizeU
+
+MORPHO_API(int)
+morpho_getImageSizeU(int width, int height, const char *p_format);
+
+/**
+ * Væf[^TCYðæ¾.
+ *
+ * @param width
+ * @param height ³
+ * @param p_format ætH[}bg¶ñ
+ * @return Væf[^TCY
+ */
+#define morpho_getImageSizeV mor_noise_reduction_IF_getImageSizeV
+
+MORPHO_API(int)
+morpho_getImageSizeV(int width, int height, const char *p_format);
+
+/**
+ * UVæf[^TCYðæ¾.
+ *
+ * @param width
+ * @param height ³
+ * @param p_format ætH[}bg¶ñ
+ * @return UVæf[^TCY
+ */
+#define morpho_getImageSizeUV mor_noise_reduction_IF_getImageSizeUV
+
+MORPHO_API(int)
+morpho_getImageSizeUV(int width, int height, const char *p_format);
+
+
+#ifdef __cplusplus
+} /* extern "C" { */
+#endif
+
+#endif /* MORPHO_GET_IMAGE_SIZE_H */
diff --git a/camera/hdr/include/morpho_hdr_checker.h b/camera/hdr/include/morpho_hdr_checker.h
new file mode 100644
index 0000000..662cfc1
--- /dev/null
+++ b/camera/hdr/include/morpho_hdr_checker.h
@@ -0,0 +1,155 @@
+/*******************************************************************
+ * morpho_hdr_checker.h
+ * [CP932/CRLF] { »û®©®»èp }
+ *------------------------------------------------------------------
+ * Copyright (C) 2011-2012 Morpho,Inc.
+ *******************************************************************/
+
+#ifndef MORPHO_HDR_CHECKER_H
+#define MORPHO_HDR_CHECKER_H
+
+/*******************************************************************/
+
+#include "morpho_api.h"
+#include "morpho_error.h"
+#include "morpho_image_data.h"
+
+/*******************************************************************/
+
+#define MORPHO_HDR_CHECKER_VER "Morpho DR Checker Ver.1.1.0 2012/1/17"
+
+/*-----------------------------------------------------------------*/
+
+#define MORPHO_HDR_CHECKER_MIN_IMAGE_WIDTH 2
+#define MORPHO_HDR_CHECKER_MAX_IMAGE_WIDTH 8192
+#define MORPHO_HDR_CHECKER_MIN_IMAGE_HEIGHT 2
+#define MORPHO_HDR_CHECKER_MAX_IMAGE_HEIGHT 8192
+
+/*******************************************************************/
+
+typedef struct _morpho_HDRChecker morpho_HDRChecker;
+
+/* HDRwW]¿í */
+struct _morpho_HDRChecker
+{
+ void *p; /**< à\¢ÌÖÌ|C^ */
+};
+
+/* òÑEÂÔê»èÌq´x */
+typedef enum {
+ MORPHO_HDR_CHECKER_SENSITIVITY_SENSITIVE,
+ MORPHO_HDR_CHECKER_SENSITIVITY_NORMAL,
+ MORPHO_HDR_CHECKER_SENSITIVITY_INSENSITIVE,
+} MORPHO_HDR_CHECKER_SENSITIVITY;
+
+/*******************************************************************/
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/**
+ * Get the version string.
+ *
+ * @return Version string (MORPHO_HDR_CHECKER_VER)
+ */
+MORPHO_API(const char*)
+morpho_HDRChecker_getVersion(void);
+
+/**
+ * KvÈTCYðæ¾
+ *
+ * @param[in] width üÍæÌ
+ * @param[in] height üÍæ̳
+ * @param[in] format üÍæÌtH[}bg
+ * @return KvÈTCY(byte)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_getBufferSize(
+ int width,
+ int height,
+ const char *format);
+
+/**
+ * ú»
+ *
+ * @param[in,out] p HDRCheckerCX^X
+ * @param[in] buffer HDRCheckerÉèÄéÖÌ|C^
+ * @param[in] buffer_size HDRCheckerÉèÄéÌTCY
+ * @param[in] width üÍæÌ
+ * @param[in] height üÍæ̳
+ * @param[in] format üÍæÌtH[}bg
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_initialize(
+ morpho_HDRChecker * const p,
+ void * const buffer,
+ const int buffer_size,
+ const int width,
+ const int height,
+ const char *format);
+
+/**
+ * N[Abv
+ * initialize()ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p HDRCheckerCX^X
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_finalize(
+ morpho_HDRChecker *p);
+
+/*-----------------------------------------------------------------*/
+
+/**
+ * HDRwWvZÌq´«ÌÝè
+ * initialize()ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p HDRCheckerCX^X
+ * @param[in] sensitivity Sensitivity (one of the MORPHO_HDR_CHECKER_SENSITIVITY enum values)
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_setSensitivity(
+ morpho_HDRChecker * const p,
+ MORPHO_HDR_CHECKER_SENSITIVITY sensitivity);
+
+/**
+ * HDRwWvZÌq´«Ìæ¾
+ * initialize()ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p HDRCheckerCX^X
+ * @param[out] sensitivity q´«ÖÌ|C^
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_getSensitivity(
+ morpho_HDRChecker * const p,
+ MORPHO_HDR_CHECKER_SENSITIVITY *sensitivity);
+
+/**
+ * HDRwWÌ]¿
+ * initialize()ÀsãÉÀsÂ\
+ *
+ * @param[in,out] p HDRCheckerCX^X
+ * @param[out] result ]¿Êði[·ézñ(vf4Ìzñ)
+ * vfªñ[ÌêÉηéºLÌIoÌæªKvÆ»è
+ * {+2, +1, -1, -2}ÌÉ»èʪi[³êé
+ * @param[in] input_image üÍæ
+ * @return G[R[h (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_HDRChecker_evaluate(
+ morpho_HDRChecker * const p,
+ int * const result,
+ const morpho_ImageData * const input_image);
+
+/*-----------------------------------------------------------------*/
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* MORPHO_HDR_CHECKER_H */
diff --git a/camera/hdr/include/morpho_image_data.h b/camera/hdr/include/morpho_image_data.h
new file mode 100644
index 0000000..2fc05aa
--- /dev/null
+++ b/camera/hdr/include/morpho_image_data.h
@@ -0,0 +1,43 @@
+/**
+ * @file morpho_image_data.h
+ * @brief Structure definitions for image data
+ * @version 1.0.0
+ * @date 2008-06-09
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_IMAGE_DATA_H
+#define MORPHO_IMAGE_DATA_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct{
+ void * y; /**< YæÌæª|C^ */
+ void * u; /**< UæÌæª|C^ */
+ void * v; /**< VæÌæª|C^ */
+} morpho_ImageYuvPlanar;
+
+typedef struct{
+ void * y; /**< YæÌæª|C^ */
+ void * uv; /**< UVæÌæª|C^ */
+} morpho_ImageYuvSemiPlanar;
+
+/** æf[^. */
+typedef struct {
+ int width; /**< */
+ int height; /**< ³ */
+ union{
+ void * p; /**< æf[^Ìæª|C^ */
+ morpho_ImageYuvPlanar planar;
+ morpho_ImageYuvSemiPlanar semi_planar;
+ } dat;
+} morpho_ImageData;
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* #ifndef MORPHO_IMAGE_DATA_H */
diff --git a/camera/hdr/include/morpho_image_data_ex.h b/camera/hdr/include/morpho_image_data_ex.h
new file mode 100644
index 0000000..ef33f35
--- /dev/null
+++ b/camera/hdr/include/morpho_image_data_ex.h
@@ -0,0 +1,51 @@
+/**
+ * @file morpho_image_data_ex.h
+ * @brief 画像データの構造体定義
+ * @version 1.0.0
+ * @date 2010-03-30
+ *
+ * Copyright (C) 2010-2011 Morpho, Inc.
+ */
+
+#ifndef MORPHO_IMAGE_DATA_EX_H
+#define MORPHO_IMAGE_DATA_EX_H
+
+#include "morpho_image_data.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct{
+ int y;
+ int u;
+ int v;
+} morpho_ImageYuvPlanarPitch;
+
+typedef struct{
+ int y;
+ int uv;
+} morpho_ImageYuvSemiPlanarPitch;
+
+/** 画像データ. */
+typedef struct {
+ int width; /**< 幅 */
+ int height; /**< 高さ */
+ union{
+ void *p; /**< 画像データの先頭ポインタ */
+ morpho_ImageYuvPlanar planar;
+ morpho_ImageYuvSemiPlanar semi_planar;
+ } dat;
+ union{
+ int p; /**< ラインの先頭から次のライン先頭までのバイト数 */
+ morpho_ImageYuvPlanarPitch planar;
+ morpho_ImageYuvSemiPlanarPitch semi_planar;
+ } pitch;
+} morpho_ImageDataEx;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* #ifndef MORPHO_IMAGE_DATA_EX_H */
diff --git a/camera/hdr/include/morpho_motion_data.h b/camera/hdr/include/morpho_motion_data.h
new file mode 100644
index 0000000..4db78db
--- /dev/null
+++ b/camera/hdr/include/morpho_motion_data.h
@@ -0,0 +1,27 @@
+/**
+ * @file morpho_motion_data.h
+ * @brief Structure definition for motion data
+ * @version 1.0.0
+ * @date 2008-06-09
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_MOTION_DATA_H
+#define MORPHO_MOTION_DATA_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** ®«f[^. */
+typedef struct {
+ int v[6]; /**< ®«f[^ */
+ int fcode; /**< ¬÷:0 / ¸s:0ÈOi¸sµ½´öj */
+} morpho_MotionData;
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* #ifndef MORPHO_MOTION_DATA_H */
diff --git a/camera/hdr/include/morpho_noise_reduction.h b/camera/hdr/include/morpho_noise_reduction.h
new file mode 100644
index 0000000..886c2c1
--- /dev/null
+++ b/camera/hdr/include/morpho_noise_reduction.h
@@ -0,0 +1,204 @@
+//====================================================================
+// morpho_noise_reduction.h
+// [SJIS/CRLF] { »û®©®»èp }
+//
+// Copyright(c) 2006-2012 Morpho,Inc.
+//====================================================================
+
+#ifndef MORPHO_NOISE_REDUCTION_H
+# define MORPHO_NOISE_REDUCTION_H
+
+//--------------------------------------------------------------------
+
+# include "morpho_api.h"
+# include "morpho_error.h"
+# include "morpho_image_data.h"
+# include "morpho_motion_data.h"
+# include "morpho_rect_int.h"
+
+//--------------------------------------------------------------------
+
+# ifdef __cplusplus
+extern "C" {
+# endif
+
+//====================================================================
+
+/** o[W¶ñ */
+# define MORPHO_NOISE_REDUCTION_VERSION "Morpho Noise Reduction Ver.0.9.0 2012/08/09"
+
+//--------------------------------------------------------------------
+/** mCYí */
+typedef struct
+{
+ void *p; /**< à\¢ÌÖÌ|C^ */
+} morpho_NoiseReduction;
+
+//--------------------------------------------------------------------
+
+/**
+ * Get the version string.
+ *
+ * @return Version string (MORPHO_NOISE_REDUCTION_VERSION)
+ */
+MORPHO_API(const char *)
+morpho_NoiseReduction_getVersion(void);
+
+/**
+ * mCYÉKvÈTCYðæ¾
+ * wèÅ«étH[}bgÍTRMðQÆB
+ *
+ * @param[in] width üÍæÌ
+ * @param[in] height üÍæ̳
+ * @param[in] format ætH[}bg¶ñ
+ * @return KvÈTCY(byte)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_getBufferSize(
+ int width,
+ int height,
+ const char *format);
+
+/**
+ * mCYíÌú»
+ *
+ * @param[out] reducer mCYí
+ * @param[out] buffer mCYíÉèÄéÖÌ|C^
+ * @param[in] buffer_size mCYíÉèÄéÌTCY.
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_initialize(
+ morpho_NoiseReduction *reducer,
+ void *buffer,
+ int buffer_size);
+
+/**
+ * mCYíÌN[Abv
+ *
+ * @param[in,out] reducer mCYí
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_finalize(
+ morpho_NoiseReduction *reducer);
+
+/**
+ * mCY: Jn
+ * oÍæ(output_image)Í1ÚÌüÍæƯ¶ÅàÇ¢
+ *
+ * @param[in,out] reducer mCYí
+ * @param[out] output_image oÍæ
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_start(
+ morpho_NoiseReduction *reducer,
+ morpho_ImageData *output_image);
+
+/**
+ * Noise reduction: process one input frame.
+ *
+ * @param[in,out] reducer Noise reducer instance
+ * @param[in] input_image Input image (original said [out]; direction corrected to match usage)
+ * @return Error code (see morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_reduceNoise(
+ morpho_NoiseReduction *reducer,
+ morpho_ImageData *input_image);
+
+/**
+ * ætH[}bgðæ¾
+ * initialize()ÀsãÉæ¾Â\
+ * obt@TCYÍ32ÈãÆ·é±Æ
+ *
+ * @param[in,out] reducer mCYí
+ * @param[out] format ætH[}bg¶ñªi[³êé
+ * @param[in] buffer_size obt@TCY
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_getImageFormat(
+ morpho_NoiseReduction *reducer,
+ char *format,
+ const int buffer_size);
+
+/**
+ * PxmCYxxðæ¾
+ * initialize()ÀsãÉæ¾Â\
+ *
+ * @param[in,out] reducer mCYí
+ * @param[out] level PxmCYxxªi[³êé
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_getLumaNoiseReductionLevel(
+ morpho_NoiseReduction *reducer,
+ int *level);
+
+/**
+ * N}mCYxxðæ¾
+ * initialize()ÀsãÉæ¾Â\
+ *
+ * @param[in,out] reducer mCYí
+ * @param[out] level N}mCYxxªi[³êé
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_getChromaNoiseReductionLevel(
+ morpho_NoiseReduction *reducer,
+ int *level);
+
+/**
+ * ætH[}bgðÝè
+ * initialize()Àsã©Âstart()ÀsOÉÝèÂ\
+ * wèÅ«étH[}bgÍTRMðQÆB
+ *
+ * @param[in,out] reducer mCYí
+ * @param[in] format ætH[}bg¶ñ
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_setImageFormat(
+ morpho_NoiseReduction *reducer,
+ const char *format);
+
+/**
+ * PxmCYxxðÝè
+ * initialize()Àsã©Âstart()ÀsOÉÝèÂ\
+ *
+ * @param[in,out] reducer mCYí
+ * @param[in] level PxmCYxx(0-7)
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_setLumaNoiseReductionLevel(
+ morpho_NoiseReduction *reducer,
+ int level);
+
+/**
+ * N}mCYxxðÝè
+ * initialize()Àsã©Âstart()ÀsOÉÝèÂ\
+ *
+ * @param[in,out] reducer mCYí
+ * @param[in] level N}mCYxx(0-7)
+ * @return G[R[h(morpho_error.h)
+ */
+MORPHO_API(int)
+morpho_NoiseReduction_setChromaNoiseReductionLevel(
+ morpho_NoiseReduction *reducer,
+ int level);
+
+//====================================================================
+
+# ifdef __cplusplus
+} // extern "C"
+# endif
+
+//--------------------------------------------------------------------
+
+#endif // !MORPHO_NOISE_REDUCTION_H
+
+//====================================================================
+// [EOF]
diff --git a/camera/hdr/include/morpho_noise_reduction_ext.h b/camera/hdr/include/morpho_noise_reduction_ext.h
new file mode 100644
index 0000000..4f15934
--- /dev/null
+++ b/camera/hdr/include/morpho_noise_reduction_ext.h
@@ -0,0 +1,17 @@
+#ifndef MORPHO_NR_EXT_H
+#define MORPHO_NR_EXT_H
+
+#include "morpho_noise_reduction.h"
+/*
+return == 0 : OK
+return != 0 : NG (Please print the return value to check Error types)
+*/
+MORPHO_API(int)
+LINK_mm_camera_morpho_noise_reduction(
+ unsigned char* yuvImage,
+ int width,
+ int height,
+ int y_level,
+ int c_level);
+
+#endif //MORPHO_NR_EXT_H
diff --git a/camera/hdr/include/morpho_rect_int.h b/camera/hdr/include/morpho_rect_int.h
new file mode 100644
index 0000000..b3534a4
--- /dev/null
+++ b/camera/hdr/include/morpho_rect_int.h
@@ -0,0 +1,37 @@
+/**
+ * @file morpho_rect_int.h
+ * @brief é`f[^Ì\¢Ìè`
+ * @version 1.0.0
+ * @date 2008-06-09
+ *
+ * Copyright (C) 2006-2012 Morpho, Inc.
+ */
+
+#ifndef MORPHO_RECT_INT_H
+#define MORPHO_RECT_INT_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** é`f[^. */
+typedef struct {
+ int sx; /**< left */
+ int sy; /**< top */
+ int ex; /**< right */
+ int ey; /**< bottom */
+} morpho_RectInt;
+
+/** é`Ìæ rect ̶ãÀW (l,t) ÆEºÀW (r,b) ðÝè·é. */
+#define morpho_RectInt_setRect(rect,l,t,r,b) do { \
+ (rect)->sx=(l);\
+ (rect)->sy=(t);\
+ (rect)->ex=(r);\
+ (rect)->ey=(b);\
+ } while(0)
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* #ifndef MORPHO_RECT_INT_H */
diff --git a/camera/inc/omx_debug.h b/camera/inc/omx_debug.h
new file mode 100644
index 0000000..1e3bdfd
--- /dev/null
+++ b/camera/inc/omx_debug.h
@@ -0,0 +1,75 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef _OMXLOG_H
+#define _OMXLOG_H
+
+#define OMX_DBG_ERROR_ENABLE 1
+#define OMX_DBG_WARNG_ENABLE 0
+#define OMX_DBG_HIGH_ENABLE 1
+#define OMX_DBG_INFO_ENABLE 0
+
+#ifdef ANDROID
+ #define LOG_NIDEBUG 0
+ #define LOG_TAG "mm-still-omx"
+ #include <utils/Log.h>
+ #ifdef NEW_LOG_API
+ #define OMXDBG(fmt, args...) ALOGV(fmt, ##args)
+ #else
+ #define OMXDBG(fmt, args...) LOGV(fmt, ##args)
+ #endif
+#endif
+
+#if(OMX_DBG_ERROR_ENABLE)
+ #define OMX_DBG_ERROR(...) OMXDBG(__VA_ARGS__)
+#else
+ #define OMX_DBG_ERROR(...) do{}while(0)
+#endif
+
+#if(OMX_DBG_WARNG_ENABLE)
+ #define OMX_DBG_WARNG(...) OMXDBG(__VA_ARGS__)
+#else
+ #define OMX_DBG_WARNG(...) do{}while(0)
+#endif
+
+
+#if(OMX_DBG_INFO_ENABLE)
+ #define OMX_DBG_INFO(...) OMXDBG(__VA_ARGS__)
+#else
+ #define OMX_DBG_INFO(...) do{}while(0)
+#endif
+
+#if(OMX_DBG_HIGH_ENABLE)
+ #define OMX_DBG_HIGH(...) OMXDBG(__VA_ARGS__)
+#else
+ #define OMX_DBG_HIGH(...) do{}while(0)
+#endif
+
+
+#endif /* _OMXLOG_H */
diff --git a/camera/inc/omx_jpeg_common.h b/camera/inc/omx_jpeg_common.h
new file mode 100644
index 0000000..ce4bc6a
--- /dev/null
+++ b/camera/inc/omx_jpeg_common.h
@@ -0,0 +1,302 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef _OMX_JPEG_COMMON_H_
+#define _OMX_JPEG_COMMON_H_
+#include "OMX_Core.h"
+#include "stdio.h"
+#include "omx_debug.h"
+
+#define INPUT_PORT 0
+#define OUTPUT_PORT 1
+#define CEILING16(X) (((X) + 0x000F) & 0xFFF0)
+#define YUV_SIZER(W, H) ((W * H * 3) /2 )
+
+#define DEFAULT_PICTURE_WIDTH 640
+#define DEFAULT_PICTURE_HEIGHT 480
+
+#define GET_COMP(X) ((OMX_PTR)((OMX_COMPONENTTYPE*)X)->pComponentPrivate)
+#define PAD_TO_WORD(a) (((a)+3)&~3)
+#define OMX_JPEG_QUEUE_CAPACITY 100
+
+
+#define OMX_MM_MALLOC(size) jpeg_malloc(size, __FILE__, __LINE__)
+#define OMX_MM_ZERO(pointer, type) memset(pointer, 0 ,sizeof(type))
+#define OMX_MM_FREE(pointer) {if (pointer) \
+ jpeg_free(pointer); \
+ else \
+ OMX_DBG_INFO("%s:%d: null pointer", __FILE__, __LINE__); \
+ pointer = NULL; \
+ }
+#define ONERROR(X, handler) if(X) { \
+ OMX_DBG_ERROR("Failure at %s:%d", __FILE__, __LINE__); \
+ handler; \
+ return X; \
+ }
+#define ERROR_ENCODING(X, handler) if(X) { \
+ OMX_DBG_ERROR("Failure at %s:%d", __FILE__, __LINE__); \
+ handler; \
+ pthread_mutex_unlock(&comp->abort_mutex); \
+ return X; \
+ }
+#define ONWARNING(X) if(X) {\
+ OMX_DBG_ERROR("Warning: Failure at %s:%d", __FILE__, __LINE__); \
+ }
+
+#define MIN(a,b) (((a) < (b)) ? (a) : (b))
+
+/* These are custom events added as an extention to the OMX Events*/
+typedef enum {
+ OMX_EVENT_ETB_DONE = OMX_EventVendorStartUnused+1,
+ OMX_EVENT_FTB_DONE,
+ OMX_EVENT_THUMBNAIL_DROPPED,
+ OMX_EVENT_JPEG_ERROR,
+ OMX_EVENT_JPEG_ABORT,
+ OMX_EVENT_MAIN_IMAGE,
+ OMX_EVENT_THUMBNAIL_IMAGE,
+ OMX_EVENT_DONE
+}omx_jpeg_events;
+
+/*This enum is consistent with the jpeg_color_format enum.
+Please be careful while changing the order. It has to
+match exactly*/
+typedef enum {
+ OMX_YCRCBLP_H2V2 = 0,
+ OMX_YCBCRLP_H2V2 = 1,
+
+ OMX_YCRCBLP_H2V1 = 2,
+ OMX_YCBCRLP_H2V1 = 3,
+
+ OMX_YCRCBLP_H1V2 = 4,
+ OMX_YCBCRLP_H1V2 = 5,
+
+ OMX_YCRCBLP_H1V1 = 6,
+ OMX_YCBCRLP_H1V1 = 7,
+
+ OMX_RGB565 = 8,
+ OMX_RGB888 = 9,
+ OMX_RGBa = 10,
+
+ OMX_JPEG_BITSTREAM_H2V2 = 12,
+ OMX_JPEG_BITSTREAM_H2V1 = 14,
+ OMX_JPEG_BITSTREAM_H1V2 = 16,
+ OMX_JPEG_BITSTREAM_H1V1 = 18,
+
+ OMX_JPEG_COLOR_FORMAT_MAX,
+
+} omx_jpeg_color_format;
+
+/*This enum is consistent with the jpege_preferences_t and
+jpegd_preferences_t enum.Please be careful while changing the order.
+It has to match exactly*/
+typedef enum {
+ OMX_JPEG_PREF_HW_ACCELERATED_PREFERRED = 0,
+ OMX_JPEG_PREF_HW_ACCELERATED_ONLY,
+ OMX_JPEG_PREF_SOFTWARE_PREFERRED,
+ OMX_JPEG_PREF_SOFTWARE_ONLY,
+ OMX_JPEG_PREF_MAX,
+
+} omx_jpeg_preference;
+
+typedef enum {
+ OMX_JPEG_MESSAGE_INITIAL=0,
+ OMX_JPEG_MESSAGE_ETB,
+ OMX_JPEG_MESSAGE_FTB,
+ OMX_JPEG_MESSAGE_ETB_DONE,
+ OMX_JPEG_MESSAGE_FTB_DONE,
+ OMX_JPEG_MESSAGE_START_ENCODE,
+ OMX_JPEG_MESSAGE_START_DECODE,
+ OMX_JPEG_MESSAGE_CHANGE_STATE,
+ OMX_JPEG_MESSAGE_FLUSH,
+ OMX_JPEG_MESSAGE_STOP,
+ OMX_JPEG_MESSAGE_FLUSH_COMPLETE,
+ OMX_JPEG_MESSAGE_TRANSACT_COMPLETE,
+ OMX_JPEG_MESSAGE_DEINIT,
+ OMX_JPEG_MESSAGE_EVENT, //for event callback args contain event info
+ OMX_JPEG_MESSAGE_DECODED_IMAGE,
+ OMX_JPEG_MESSAGE_DECODE_DONE,
+
+}omx_jpeg_message;
+
+typedef enum omx_jpeg_image_type{
+ OMX_JPEG =0,
+ OMX_JPS,
+ OMX_MPO,
+
+}omx_jpeg_image_type;
+
+//message arg
+typedef union omx_jpeg_message_arg{
+ void * pValue;
+ int iValue;
+}omx_jpeg_message_arg;
+
+//message
+typedef struct omx_jpeg_queue_item{
+ omx_jpeg_message message;
+ omx_jpeg_message_arg args[3];
+
+}omx_jpeg_queue_item;
+
+
+typedef enum {
+ OMX_JPEG_QUEUE_COMMAND=0,
+ OMX_JPEG_QUEUE_ETB,
+ OMX_JPEG_QUEUE_FTB,
+ OMX_JPEG_QUEUE_ABORT,
+}omx_jpeg_queue_type;
+
+typedef struct omx_jpeg_queue{
+ omx_jpeg_queue_item container[OMX_JPEG_QUEUE_CAPACITY];
+ int front;
+ int back;
+ int size;
+ omx_jpeg_queue_type type;
+}omx_jpeg_queue;
+
+typedef struct omx_jpeg_message_queue{
+ omx_jpeg_queue command;
+ omx_jpeg_queue etb;
+ omx_jpeg_queue ftb;
+ omx_jpeg_queue abort;
+ pthread_mutex_t lock;
+ pthread_cond_t cond;
+ int messageCount;
+ int initialized;
+}omx_jpeg_message_queue;
+
+int omx_jpeg_queue_insert(omx_jpeg_queue* queue, omx_jpeg_queue_item * item);
+int omx_jpeg_queue_remove(omx_jpeg_queue * queue, omx_jpeg_queue_item* item);
+void omx_jpeg_message_queue_init(omx_jpeg_message_queue * queue);
+void omx_jpeg_queue_init(omx_jpeg_queue * queue);
+int omx_jpeg_queue_flush(omx_jpeg_queue * queue);
+//Common OMX Functions
+OMX_ERRORTYPE
+omx_component_image_init(OMX_IN OMX_HANDLETYPE hComp, OMX_IN OMX_STRING componentName);
+
+
+OMX_ERRORTYPE
+omx_component_image_get_version(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_OUT OMX_STRING componentName,
+ OMX_OUT OMX_VERSIONTYPE* componentVersion,
+ OMX_OUT OMX_VERSIONTYPE* specVersion,
+ OMX_OUT OMX_UUIDTYPE* componentUUID);
+
+OMX_ERRORTYPE
+omx_component_image_send_command(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_COMMANDTYPE cmd,
+ OMX_IN OMX_U32 param1,
+ OMX_IN OMX_PTR cmdData);
+
+OMX_ERRORTYPE
+omx_component_image_get_parameter(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_INDEXTYPE paramIndex,
+ OMX_INOUT OMX_PTR paramData);
+
+OMX_ERRORTYPE
+omx_component_image_set_parameter(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_INDEXTYPE paramIndex,
+ OMX_IN OMX_PTR paramData);
+
+OMX_ERRORTYPE
+omx_component_image_get_config(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_INDEXTYPE configIndex,
+ OMX_INOUT OMX_PTR configData);
+
+OMX_ERRORTYPE
+omx_component_image_set_config(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_INDEXTYPE configIndex,
+ OMX_IN OMX_PTR configData);
+
+OMX_ERRORTYPE
+omx_component_image_get_extension_index(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_STRING paramName,
+ OMX_OUT OMX_INDEXTYPE* indexType);
+
+OMX_ERRORTYPE
+omx_component_image_get_state(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_OUT OMX_STATETYPE* state);
+
+OMX_ERRORTYPE
+omx_component_image_tunnel_request(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_U32 port,
+ OMX_IN OMX_HANDLETYPE peerComponent,
+ OMX_IN OMX_U32 peerPort,
+ OMX_INOUT OMX_TUNNELSETUPTYPE* tunnelSetup);
+
+OMX_ERRORTYPE
+omx_component_image_use_buffer(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_INOUT OMX_BUFFERHEADERTYPE** bufferHdr,
+ OMX_IN OMX_U32 port,
+ OMX_IN OMX_PTR appData,
+ OMX_IN OMX_U32 bytes,
+ OMX_IN OMX_U8* buffer);
+
+
+OMX_ERRORTYPE
+omx_component_image_allocate_buffer(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_INOUT OMX_BUFFERHEADERTYPE** bufferHdr,
+ OMX_IN OMX_U32 port,
+ OMX_IN OMX_PTR appData,
+ OMX_IN OMX_U32 bytes);
+
+OMX_ERRORTYPE
+omx_component_image_free_buffer(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_U32 port,
+ OMX_IN OMX_BUFFERHEADERTYPE* buffer);
+
+OMX_ERRORTYPE
+omx_component_image_empty_this_buffer(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_BUFFERHEADERTYPE* buffer);
+
+OMX_ERRORTYPE
+omx_component_image_fill_this_buffer(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_BUFFERHEADERTYPE* buffer);
+
+OMX_ERRORTYPE
+omx_component_image_set_callbacks(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_IN OMX_CALLBACKTYPE* callbacks,
+ OMX_IN OMX_PTR appData);
+
+OMX_ERRORTYPE
+omx_component_image_deinit(OMX_IN OMX_HANDLETYPE hComp);
+
+OMX_ERRORTYPE
+omx_component_image_use_EGL_image(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_INOUT OMX_BUFFERHEADERTYPE** bufferHdr,
+ OMX_IN OMX_U32 port,
+ OMX_IN OMX_PTR appData,
+ OMX_IN void* eglImage);
+
+OMX_ERRORTYPE
+omx_component_image_role_enum(OMX_IN OMX_HANDLETYPE hComp,
+ OMX_OUT OMX_U8* role,
+ OMX_IN OMX_U32 index);
+
+#endif
diff --git a/camera/inc/omx_jpeg_ext.h b/camera/inc/omx_jpeg_ext.h
new file mode 100644
index 0000000..0dc51e3
--- /dev/null
+++ b/camera/inc/omx_jpeg_ext.h
@@ -0,0 +1,121 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ */
+
+#ifndef OMX_JPEG_EXT_H_
+#define OMX_JPEG_EXT_H_
+
+#include <OMX_Image.h>
+#include "QCamera_Intf.h"
+#include "omx_jpeg_common.h"
+
+typedef struct omx_jpeg_pmem_info {
+ int fd;
+ int offset;
+} omx_jpeg_pmem_info;
+
+typedef struct omx_jpeg_exif_info_tag {
+ exif_tag_id_t tag_id;
+ exif_tag_entry_t tag_entry;
+
+} omx_jpeg_exif_info_tag;
+
+typedef struct omx_jpeg_buffer_offset {
+ int width;
+ int height;
+ int yOffset;
+ int cbcrOffset;
+ int totalSize;
+ int paddedFrameSize;
+} omx_jpeg_buffer_offset;
+
+
+#define OMX_JPEG_PREFIX "omx.qcom.jpeg.exttype."
+#define OMX_JPEG_PREFIX_LENGTH 22
+
+/*adding to enum also add to the char name array down*/
+typedef enum {
+ OMX_JPEG_EXT_START = 0x7F000000,
+ OMX_JPEG_EXT_EXIF,
+ OMX_JPEG_EXT_THUMBNAIL,
+ OMX_JPEG_EXT_THUMBNAIL_QUALITY,
+ OMX_JPEG_EXT_BUFFER_OFFSET,
+ OMX_JPEG_EXT_ACBCR_OFFSET,
+ OMX_JPEG_EXT_USER_PREFERENCES,
+ OMX_JPEG_EXT_REGION,
+ OMX_JPEG_EXT_IMAGE_TYPE,
+ OMX_JPEG_EXT_END,
+} omx_jpeg_ext_index;
+
+extern char * omx_jpeg_ext_name[];
+/*char * omx_jpeg_ext_name[] = {
+ "start",
+ "exif",
+ "thumbnail",
+ "thumbnail_quality",
+ "buffer_offset",
+ "acbcr_offset",
+ "user_preferences",
+ "region",
+ "end"
+};*/
+
+/*assume main img scaling*/
+
+typedef struct omx_jpeg_thumbnail {
+ int width;
+ int height;
+ int scaling;
+ int cropWidth;
+ int cropHeight;
+ int left;
+ int top;
+} omx_jpeg_thumbnail;
+
+typedef struct omx_jpeg_thumbnail_quality {
+ OMX_U32 nQFactor;
+} omx_jpeg_thumbnail_quality;
+
+typedef struct omx_jpeg_user_preferences {
+ omx_jpeg_color_format color_format;
+ omx_jpeg_color_format thumbnail_color_format;
+ omx_jpeg_preference preference;
+} omx_jpeg_user_preferences;
+
+typedef struct omx_jpeg_region{
+ int32_t left;
+ int32_t top;
+ int32_t right;
+ int32_t bottom;
+}omx_jpeg_region;
+
+typedef struct omx_jpeg_type{
+ omx_jpeg_image_type image_type;
+}omx_jpeg_type;
+
+#endif /* OMX_JPEG_EXT_H_ */
diff --git a/camera/mm-camera-interface/Android.mk b/camera/mm-camera-interface/Android.mk
new file mode 100755
index 0000000..0a5c3f5
--- /dev/null
+++ b/camera/mm-camera-interface/Android.mk
@@ -0,0 +1,35 @@
+LOCAL_PATH:= $(call my-dir)
+LOCAL_DIR_PATH:= $(call my-dir)
+include $(CLEAR_VARS)
+
+MM_CAM_FILES:= \
+ mm_camera_interface2.c \
+ mm_camera_stream.c \
+ mm_camera_channel.c \
+ mm_camera.c \
+ mm_camera_poll_thread.c \
+ mm_camera_notify.c \
+ mm_camera_sock.c \
+ mm_camera_helper.c \
+ mm_omx_jpeg_encoder.c
+
+LOCAL_CFLAGS+= -D_ANDROID_
+LOCAL_COPY_HEADERS_TO := mm-camera-interface
+LOCAL_COPY_HEADERS += mm_camera_interface2.h
+LOCAL_COPY_HEADERS += mm_omx_jpeg_encoder.h
+
+LOCAL_C_INCLUDES+= $(LOCAL_PATH)/..
+LOCAL_C_INCLUDES+= $(LOCAL_PATH)/../inc
+
+LOCAL_C_INCLUDES+= hardware/qcom/media/mm-core/inc
+LOCAL_CFLAGS += -include bionic/libc/include/sys/socket.h
+LOCAL_CFLAGS += -include bionic/libc/include/sys/un.h
+
+LOCAL_SRC_FILES := $(MM_CAM_FILES)
+
+LOCAL_MODULE := libmmcamera_interface2
+LOCAL_PRELINK_MODULE := false
+LOCAL_SHARED_LIBRARIES := libdl libcutils liblog
+LOCAL_MODULE_TAGS := optional
+
+include $(BUILD_SHARED_LIBRARY)
diff --git a/camera/mm-camera-interface/mm_camera.c b/camera/mm-camera-interface/mm_camera.c
new file mode 100644
index 0000000..eb3fe00
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera.c
@@ -0,0 +1,1132 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_camera_sock.h"
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+static int32_t mm_camera_send_native_ctrl_cmd(mm_camera_obj_t * my_obj,
+ cam_ctrl_type type, uint32_t length, void *value);
+static int32_t mm_camera_send_native_ctrl_timeout_cmd(mm_camera_obj_t * my_obj,
+ cam_ctrl_type type, uint32_t length, void *value, int timeout);
+static int32_t mm_camera_ctrl_set_specialEffect (mm_camera_obj_t *my_obj, int effect) {
+ struct v4l2_control ctrl;
+ if (effect == CAMERA_EFFECT_MAX)
+ effect = CAMERA_EFFECT_OFF;
+ int rc = 0;
+
+ ctrl.id = MSM_V4L2_PID_EFFECT;
+ ctrl.value = effect;
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_S_CTRL, &ctrl);
+ return rc;
+}
+
+static int32_t mm_camera_ctrl_set_antibanding (mm_camera_obj_t *my_obj, int antibanding) {
+ int rc = 0;
+ struct v4l2_control ctrl;
+
+ ctrl.id = V4L2_CID_POWER_LINE_FREQUENCY;
+ ctrl.value = antibanding;
+ rc = ioctl(my_obj->ctrl_fd, VIDIOC_S_CTRL, &ctrl);
+ return rc;
+}
+
+static int32_t mm_camera_ctrl_set_auto_focus (mm_camera_obj_t *my_obj, int value)
+{
+ int rc = 0;
+ struct v4l2_queryctrl queryctrl;
+
+ memset (&queryctrl, 0, sizeof (queryctrl));
+ queryctrl.id = V4L2_CID_FOCUS_AUTO;
+
+ if(value != 0 && value != 1) {
+ CDBG("%s:boolean required, invalid value = %d\n",__func__, value);
+ return -MM_CAMERA_E_INVALID_INPUT;
+ }
+ if (-1 == ioctl (my_obj->ctrl_fd, VIDIOC_QUERYCTRL, &queryctrl)) {
+ CDBG ("V4L2_CID_FOCUS_AUTO is not supported\n");
+ } else if (queryctrl.flags & V4L2_CTRL_FLAG_DISABLED) {
+ CDBG ("%s:V4L2_CID_FOCUS_AUTO is not supported\n", __func__);
+ } else {
+ if(0 != (rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ V4L2_CID_FOCUS_AUTO, value))){
+ CDBG("%s: error, id=0x%x, value=%d, rc = %d\n",
+ __func__, V4L2_CID_FOCUS_AUTO, value, rc);
+ rc = -1;
+ }
+ }
+ return rc;
+}
+
+static int32_t mm_camera_ctrl_set_whitebalance (mm_camera_obj_t *my_obj, int mode) {
+
+ int rc = 0, value;
+ uint32_t id;
+
+ switch(mode) {
+ case MM_CAMERA_WHITE_BALANCE_AUTO:
+ id = V4L2_CID_AUTO_WHITE_BALANCE;
+ value = 1; /* TRUE */
+ break;
+ case MM_CAMERA_WHITE_BALANCE_OFF:
+ id = V4L2_CID_AUTO_WHITE_BALANCE;
+ value = 0; /* FALSE */
+ break;
+ default:
+ id = V4L2_CID_WHITE_BALANCE_TEMPERATURE;
+ if(mode == WHITE_BALANCE_DAYLIGHT) value = 6500;
+ else if(mode == WHITE_BALANCE_INCANDESCENT) value = 2800;
+ else if(mode == WHITE_BALANCE_FLUORESCENT ) value = 4200;
+ else if(mode == WHITE_BALANCE_CLOUDY) value = 7500;
+ else
+ value = 4200;
+ }
+ if(0 != (rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ id, value))){
+ CDBG("%s: error, exp_metering_action_param=%d, rc = %d\n", __func__, value, rc);
+ goto end;
+ }
+end:
+ return rc;
+}
+
+static int32_t mm_camera_ctrl_set_toggle_afr (mm_camera_obj_t *my_obj) {
+ int rc = 0;
+ int value = 0;
+ if(0 != (rc = mm_camera_util_g_ctrl(my_obj->ctrl_fd,
+ V4L2_CID_EXPOSURE_AUTO, &value))){
+ goto end;
+ }
+ /* V4L2_CID_EXPOSURE_AUTO needs to be AUTO or SHUTTER_PRIORITY */
+ if (value != V4L2_EXPOSURE_AUTO && value != V4L2_EXPOSURE_SHUTTER_PRIORITY) {
+ CDBG("%s: V4L2_CID_EXPOSURE_AUTO needs to be AUTO/SHUTTER_PRIORITY\n",
+ __func__);
+ return -1;
+ }
+ if(0 != (rc = mm_camera_util_g_ctrl(my_obj->ctrl_fd,
+ V4L2_CID_EXPOSURE_AUTO_PRIORITY, &value))){
+ goto end;
+ }
+ value = !value;
+ if(0 != (rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ V4L2_CID_EXPOSURE_AUTO_PRIORITY, value))){
+ goto end;
+ }
+end:
+ return rc;
+}
+
+static mm_camera_channel_type_t mm_camera_util_opcode_2_ch_type(
+ mm_camera_obj_t *my_obj,
+ mm_camera_ops_type_t opcode)
+{
+ mm_camera_channel_type_t type = MM_CAMERA_CH_MAX;
+ switch(opcode) {
+ case MM_CAMERA_OPS_PREVIEW:
+ return MM_CAMERA_CH_PREVIEW;
+ case MM_CAMERA_OPS_VIDEO:
+ return MM_CAMERA_CH_VIDEO;
+ case MM_CAMERA_OPS_SNAPSHOT:
+ return MM_CAMERA_CH_SNAPSHOT;
+ case MM_CAMERA_OPS_PREPARE_SNAPSHOT:
+ return MM_CAMERA_CH_SNAPSHOT;
+ case MM_CAMERA_OPS_RAW:
+ return MM_CAMERA_CH_RAW;
+ case MM_CAMERA_OPS_ZSL:
+ return MM_CAMERA_CH_SNAPSHOT;
+ default:
+ break;
+ }
+ return type;
+}
+
+static int32_t mm_camera_util_set_op_mode(mm_camera_obj_t * my_obj,
+ mm_camera_op_mode_type_t *op_mode)
+{
+ int32_t rc = MM_CAMERA_OK;
+ uint32_t v4l2_op_mode = MSM_V4L2_CAM_OP_DEFAULT;
+
+ if (my_obj->op_mode == *op_mode)
+ goto end;
+ if(mm_camera_poll_busy(my_obj) == TRUE) {
+ CDBG("%s: cannot change op_mode while stream on\n", __func__);
+ rc = -MM_CAMERA_E_INVALID_OPERATION;
+ goto end;
+ }
+ switch(*op_mode) {
+ case MM_CAMERA_OP_MODE_ZSL:
+ v4l2_op_mode = MSM_V4L2_CAM_OP_ZSL;
+ break;
+ case MM_CAMERA_OP_MODE_CAPTURE:
+ v4l2_op_mode = MSM_V4L2_CAM_OP_CAPTURE;
+ break;
+ case MM_CAMERA_OP_MODE_VIDEO:
+ v4l2_op_mode = MSM_V4L2_CAM_OP_VIDEO;
+ break;
+ default:
+ rc = - MM_CAMERA_E_INVALID_INPUT;
+ goto end;
+ break;
+ }
+ if(0 != (rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ MSM_V4L2_PID_CAM_MODE, v4l2_op_mode))){
+ CDBG("%s: input op_mode=%d, s_ctrl rc=%d\n", __func__, *op_mode, rc);
+ goto end;
+ }
+ /* if success update mode field */
+ my_obj->op_mode = *op_mode;
+end:
+ CDBG("%s: op_mode=%d,rc=%d\n", __func__, *op_mode, rc);
+ return rc;
+}
+
+int32_t mm_camera_set_general_parm(mm_camera_obj_t * my_obj, mm_camera_parm_t *parm)
+{
+ int rc = -MM_CAMERA_E_NOT_SUPPORTED;
+ int isZSL =0;
+
+ switch(parm->parm_type) {
+ case MM_CAMERA_PARM_EXPOSURE:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+ MSM_V4L2_PID_EXP_METERING,
+ *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_SHARPNESS:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_SHARPNESS,
+ *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_CONTRAST:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_CONTRAST,
+ *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_SATURATION:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_SATURATION,
+ *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_BRIGHTNESS:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_BRIGHTNESS,
+ *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_WHITE_BALANCE:
+ return mm_camera_ctrl_set_whitebalance (my_obj, *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_ISO:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_ISO,
+ *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_ZOOM:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd, V4L2_CID_ZOOM_ABSOLUTE,
+ *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_LUMA_ADAPTATION:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_LUMA_ADAPTATION,
+ *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_ANTIBANDING:
+ return mm_camera_ctrl_set_antibanding (my_obj, *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_CONTINUOUS_AF:
+ return mm_camera_ctrl_set_auto_focus(my_obj, *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_HJR:
+ return mm_camera_util_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_HJR, *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_EFFECT:
+ return mm_camera_ctrl_set_specialEffect (my_obj, *((int *)(parm->p_value)));
+ case MM_CAMERA_PARM_FPS:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_FPS, sizeof(uint32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_FPS_MODE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_FPS_MODE, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_EXPOSURE_COMPENSATION:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_EXPOSURE_COMPENSATION, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_LED_MODE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_LED_MODE, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_ROLLOFF:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_ROLLOFF, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_MODE:
+ my_obj->current_mode = *((camera_mode_t *)parm->p_value);
+ break;
+ case MM_CAMERA_PARM_FOCUS_RECT:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_FOCUS_RECT, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_AEC_ROI:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_AEC_ROI, sizeof(cam_set_aec_roi_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_AF_ROI:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_AF_ROI, sizeof(roi_info_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_FOCUS_MODE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_AF_MODE, sizeof(int32_t), (void *)parm->p_value);
+#if 0 //to be enabled later: @punits
+ case MM_CAMERA_PARM_AF_MTR_AREA:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_AF_MTR_AREA, sizeof(af_mtr_area_t), (void *)parm->p_value);*/
+ case MM_CAMERA_PARM_AEC_MTR_AREA:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_AEC_MTR_AREA, sizeof(aec_mtr_area_t), (void *)parm->p_value);
+#endif
+ case MM_CAMERA_PARM_CAF_ENABLE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_CAF, sizeof(uint32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_BESTSHOT_MODE:
+ CDBG("%s : MM_CAMERA_PARM_BESTSHOT_MODE value : %d",__func__,*((int *)(parm->p_value)));
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_BESTSHOT_MODE, sizeof(int32_t), (void *)parm->p_value);
+ break;
+ case MM_CAMERA_PARM_VIDEO_DIS:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_VIDEO_DIS_PARAMS, sizeof(video_dis_param_ctrl_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_VIDEO_ROT:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_VIDEO_ROT_PARAMS, sizeof(video_rotation_param_ctrl_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_SCE_FACTOR:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_SCE_FACTOR, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_FD:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_FD, sizeof(fd_set_parm_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_AEC_LOCK:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_AEC_LOCK, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_AWB_LOCK:
+ return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_SET_AWB_LOCK,
+ sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_MCE:
+ return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_SET_PARM_MCE,
+ sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE:
+ return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+ sizeof(focus_distances_info_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE:
+ return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+ sizeof(focus_distances_info_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_RESET_LENS_TO_INFINITY:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_RESET_LENS_TO_INFINITY,
+ 0, NULL);
+ case MM_CAMERA_PARM_SNAPSHOTDATA:
+ return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_SNAPSHOTDATA,
+ sizeof(snapshotData_info_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_HFR:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_HFR, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_REDEYE_REDUCTION:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_REDEYE_REDUCTION, sizeof(int32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_WAVELET_DENOISE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_WAVELET_DENOISE, sizeof(denoise_param_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_3D_DISPLAY_DISTANCE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_3D_DISPLAY_DISTANCE, sizeof(float), (void *)parm->p_value);
+ case MM_CAMERA_PARM_3D_VIEW_ANGLE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_3D_VIEW_ANGLE, sizeof(uint32_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_ZOOM_RATIO:
+ break;
+ case MM_CAMERA_PARM_HISTOGRAM:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_HISTOGRAM, sizeof(int8_t), (void *)parm->p_value);
+ case MM_CAMERA_PARM_JPEG_ROTATION:
+ if(my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL){
+ isZSL =1;
+ }
+ mm_jpeg_encoder_setRotation(*((int *)parm->p_value),isZSL);
+ return MM_CAMERA_OK;
+
+ case MM_CAMERA_PARM_ASD_ENABLE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_ASD_ENABLE, sizeof(uint32_t), (void *)parm->p_value);
+
+ case MM_CAMERA_PARM_RECORDING_HINT:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_RECORDING_HINT, sizeof(uint32_t), (void *)parm->p_value);
+
+ case MM_CAMERA_PARM_PREVIEW_FORMAT:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_PREVIEW_FORMAT, sizeof(uint32_t), (void *)parm->p_value);
+
+ case MM_CAMERA_PARM_DIS_ENABLE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_DIS_ENABLE, sizeof(uint32_t), (void *)parm->p_value);
+
+ case MM_CAMERA_PARM_FULL_LIVESHOT: {
+ my_obj->full_liveshot = *((int *)(parm->p_value));
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_FULL_LIVESHOT, sizeof(uint32_t), (void *)parm->p_value);
+ }
+
+ case MM_CAMERA_PARM_LOW_POWER_MODE:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_LOW_POWER_MODE, sizeof(uint32_t), (void *)parm->p_value);
+
+ case MM_CAMERA_PARM_CAF_TYPE:
+ CDBG("SM : %s : MM_CAMERA_PARM_CAF_TYPE value : %d",__func__,*((int *)(parm->p_value)));
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_CAF_TYPE, sizeof(int32_t), (void *)parm->p_value);
+
+ case MM_CAMERA_PARM_HDR:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_PARM_HDR, sizeof(exp_bracketing_t), (void *)parm->p_value);
+
+ case MM_CAMERA_PARM_CAF_LOCK_CANCEL:
+ return mm_camera_send_native_ctrl_cmd(my_obj,
+ CAMERA_SET_CAF_LOCK_CANCEL, sizeof(uint32_t), (void *)parm->p_value);
+
+ default:
+ CDBG("%s: default: parm %d not supported\n", __func__, parm->parm_type);
+ break;
+ }
+ return rc;
+}
+/* Send a synchronous "native" control command to the kernel camera
+ * driver through the private V4L2 control MSM_V4L2_PID_CTRL_CMD.
+ *
+ * type:   native opcode (CAMERA_SET_* / CAMERA_GET_* driver command).
+ * length: payload size in bytes (truncated to uint16_t for the driver).
+ * value:  in/out payload buffer; NULL is allowed for zero-length cmds.
+ *
+ * Uses a fixed 1000 ms driver-side timeout; see
+ * mm_camera_send_native_ctrl_timeout_cmd() for a caller-chosen timeout.
+ * Returns the ioctl rc, forced to -1 when the ioctl failed or the
+ * driver status is none of ACCEPTED / SUCCESS / INVALID_PARM. */
+static int32_t mm_camera_send_native_ctrl_cmd(mm_camera_obj_t * my_obj,
+                                          cam_ctrl_type type, uint32_t length, void *value)
+{
+    int rc = -1;
+    struct msm_ctrl_cmd ctrl_cmd;
+    memset(&ctrl_cmd, 0, sizeof(ctrl_cmd));
+    ctrl_cmd.type = type;
+    ctrl_cmd.length = (uint16_t)length;
+    ctrl_cmd.timeout_ms = 1000;
+    ctrl_cmd.value = value;
+    ctrl_cmd.status = CAM_CTRL_SUCCESS;
+    /* NOTE(review): the command struct's address is passed as an int
+     * control value -- assumes pointers fit in int (32-bit target);
+     * confirm before any 64-bit port. */
+    rc = mm_camera_util_private_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_CTRL_CMD,
+                                       (int)&ctrl_cmd);
+    CDBG("%s: type=%d, rc = %d, status = %d\n",
+        __func__, type, rc, ctrl_cmd.status);
+
+    if(rc != MM_CAMERA_OK || ((ctrl_cmd.status != CAM_CTRL_ACCEPTED) &&
+        (ctrl_cmd.status != CAM_CTRL_SUCCESS) &&
+        (ctrl_cmd.status != CAM_CTRL_INVALID_PARM)))
+        rc = -1;
+    return rc;
+}
+
+/* Same as mm_camera_send_native_ctrl_cmd() but with a caller-supplied
+ * driver timeout in milliseconds (used e.g. for CAMERA_PREPARE_SNAPSHOT
+ * which can take longer than the default 1000 ms).
+ * Returns the ioctl rc, forced to -1 when the ioctl failed or the
+ * driver status is none of ACCEPTED / SUCCESS / INVALID_PARM. */
+static int32_t mm_camera_send_native_ctrl_timeout_cmd(mm_camera_obj_t * my_obj,
+                                          cam_ctrl_type type, uint32_t length, void *value,int timeout)
+{
+    int rc = -1;
+    struct msm_ctrl_cmd ctrl_cmd;
+    memset(&ctrl_cmd, 0, sizeof(ctrl_cmd));
+    ctrl_cmd.type = type;
+    ctrl_cmd.length = (uint16_t)length;
+    ctrl_cmd.timeout_ms = timeout;
+    ctrl_cmd.value = value;
+    ctrl_cmd.status = CAM_CTRL_SUCCESS;
+    /* NOTE(review): pointer passed as int control value -- 32-bit only;
+     * same caveat as mm_camera_send_native_ctrl_cmd(). */
+    rc = mm_camera_util_private_s_ctrl(my_obj->ctrl_fd, MSM_V4L2_PID_CTRL_CMD,
+                                       (int)&ctrl_cmd);
+    CDBG("%s: type=%d, rc = %d, status = %d\n",
+        __func__, type, rc, ctrl_cmd.status);
+    if(rc != MM_CAMERA_OK || ((ctrl_cmd.status != CAM_CTRL_ACCEPTED) &&
+        (ctrl_cmd.status != CAM_CTRL_SUCCESS) &&
+        (ctrl_cmd.status != CAM_CTRL_INVALID_PARM)))
+        rc = -1;
+    return rc;
+}
+
+/* Top-level parameter setter.
+ *
+ * Handles a few parameter types locally:
+ *  - OP_MODE            -> mm_camera_util_set_op_mode()
+ *  - DIMENSION          -> sent to the driver, then cached in my_obj->dim
+ *  - SNAPSHOT_BURST_NUM -> cached in my_obj->snap_burst_num_by_user
+ *  - CH_IMAGE_FMT       -> routed to the channel state machine
+ *  - CHECK_AF_RETRY / LG_CAF_LOCK / INFORM_STARTPRVIEW -> zero-length
+ *    native commands sent straight to the driver
+ * Every other parameter type is forwarded to mm_camera_set_general_parm().
+ * Returns MM_CAMERA_OK on success or a negative error code.
+ *
+ * Fix vs. original: removed the unused local `uint16_t len;`. */
+int32_t mm_camera_set_parm(mm_camera_obj_t * my_obj,
+    mm_camera_parm_t *parm)
+{
+    int32_t rc = -1;
+    CDBG("%s type =%d", __func__, parm->parm_type);
+    switch(parm->parm_type) {
+    case MM_CAMERA_PARM_OP_MODE:
+        rc = mm_camera_util_set_op_mode(my_obj,
+                        (mm_camera_op_mode_type_t *)parm->p_value);
+        break;
+    case MM_CAMERA_PARM_DIMENSION:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                    CAMERA_SET_PARM_DIMENSION, sizeof(cam_ctrl_dimension_t), parm->p_value);
+        if(rc != MM_CAMERA_OK) {
+            CDBG("%s: mm_camera_send_native_ctrl_cmd err=%d\n", __func__, rc);
+            break;
+        }
+        /* Cache the dimensions the driver accepted so later
+         * MM_CAMERA_PARM_DIMENSION "get" calls can answer locally. */
+        memcpy(&my_obj->dim, (cam_ctrl_dimension_t *)parm->p_value,
+                     sizeof(cam_ctrl_dimension_t));
+        CDBG("%s: dw=%d,dh=%d,vw=%d,vh=%d,pw=%d,ph=%d,tw=%d,th=%d,raw_w=%d,raw_h=%d\n",
+                 __func__,
+                 my_obj->dim.display_width,my_obj->dim.display_height,
+                 my_obj->dim.video_width, my_obj->dim.video_height,
+                 my_obj->dim.picture_width,my_obj->dim.picture_height,
+                 my_obj->dim.ui_thumbnail_width,my_obj->dim.ui_thumbnail_height,
+                 my_obj->dim.raw_picture_width,my_obj->dim.raw_picture_height);
+        break;
+    case MM_CAMERA_PARM_SNAPSHOT_BURST_NUM:
+        CDBG("%s: Setting snapshot burst number: %d\n", __func__, *((int *)parm->p_value));
+        my_obj->snap_burst_num_by_user = *((int *)parm->p_value);
+        rc = MM_CAMERA_OK;
+        break;
+    case MM_CAMERA_PARM_CH_IMAGE_FMT:
+        {
+            mm_camera_ch_image_fmt_parm_t *fmt;
+            fmt = (mm_camera_ch_image_fmt_parm_t *)parm->p_value;
+            rc = mm_camera_ch_fn(my_obj, fmt->ch_type,
+                            MM_CAMERA_STATE_EVT_SET_FMT, fmt);
+        }
+        break;
+    case MM_CAMERA_PARM_CHECK_AF_RETRY:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_CHECK_AF_RETRY,
+                                                   0, (void *)NULL);
+    case MM_CAMERA_PARM_LG_CAF_LOCK:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_SET_LG_CAF_LOCK,
+                                                   0, (void *)NULL);
+    case MM_CAMERA_PARM_INFORM_STARTPRVIEW:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_SET_INFORM_STARTPREVIEW,
+                                                   0, (void *)NULL);
+    default:
+        rc = mm_camera_set_general_parm(my_obj, parm);
+        break;
+    }
+    return rc;
+}
+
+/* Top-level parameter getter.  Answers from locally-cached state where
+ * possible (dimensions, op mode, properties captured at open time via
+ * CAMERA_GET_CAPABILITIES); everything else is queried live from the
+ * driver with mm_camera_send_native_ctrl_cmd().  Returns MM_CAMERA_OK
+ * on success, -1 for unsupported parameter types. */
+int32_t mm_camera_get_parm(mm_camera_obj_t * my_obj,
+    mm_camera_parm_t *parm)
+{
+    int32_t rc = MM_CAMERA_OK;
+
+    switch(parm->parm_type) {
+    case MM_CAMERA_PARM_CROP:
+        /* NOTE(review): `return rc = ...` returns directly; the `break`
+         * below is unreachable. */
+        return rc = mm_camera_ch_fn(my_obj,
+                    ((mm_camera_ch_crop_t *)parm->p_value)->ch_type,
+                    MM_CAMERA_STATE_EVT_GET_CROP, parm->p_value);
+        break;
+    case MM_CAMERA_PARM_DIMENSION:
+        /* Served from the cache filled by mm_camera_set_parm(DIMENSION). */
+        memcpy(parm->p_value, &my_obj->dim, sizeof(my_obj->dim));
+        CDBG("%s: dw=%d,dh=%d,vw=%d,vh=%d,pw=%d,ph=%d,tw=%d,th=%d,ovx=%x,ovy=%d,opx=%d,opy=%d, m_fmt=%d, t_ftm=%d\n",
+                 __func__,
+                 my_obj->dim.display_width,my_obj->dim.display_height,
+                 my_obj->dim.video_width,my_obj->dim.video_height,
+                 my_obj->dim.picture_width,my_obj->dim.picture_height,
+                 my_obj->dim.ui_thumbnail_width,my_obj->dim.ui_thumbnail_height,
+                 my_obj->dim.orig_video_width,my_obj->dim.orig_video_height,
+                 my_obj->dim.orig_picture_width,my_obj->dim.orig_picture_height,
+                 my_obj->dim.main_img_format, my_obj->dim.thumb_format);
+        break;
+    case MM_CAMERA_PARM_MAX_PICTURE_SIZE: {
+        mm_camera_dimension_t *dim =
+            (mm_camera_dimension_t *)parm->p_value;
+        dim->height = my_obj->properties.max_pict_height;
+        dim->width = my_obj->properties.max_pict_width;
+        CDBG("%s: Max Picture Size: %d X %d\n", __func__,
+             dim->width, dim->height);
+    }
+        break;
+    case MM_CAMERA_PARM_RAW_SNAPSHOT_FMT:
+        *((cam_format_t *)parm->p_value) = my_obj->properties.pxlcode;
+        break;
+    case MM_CAMERA_PARM_PREVIEW_FORMAT:
+        *((int *)parm->p_value) = my_obj->properties.preview_format;
+        break;
+    case MM_CAMERA_PARM_PREVIEW_SIZES_CNT:
+        *((int *)parm->p_value) = my_obj->properties.preview_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_VIDEO_SIZES_CNT:
+        *((int *)parm->p_value) = my_obj->properties.video_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_THUMB_SIZES_CNT:
+        *((int *)parm->p_value) = my_obj->properties.thumb_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_HFR_SIZES_CNT:
+        *((int *)parm->p_value) = my_obj->properties.hfr_sizes_cnt;
+        break;
+    case MM_CAMERA_PARM_HFR_FRAME_SKIP:
+        *((int *)parm->p_value) = my_obj->properties.hfr_frame_skip;
+        break;
+    case MM_CAMERA_PARM_DEFAULT_PREVIEW_WIDTH:
+        *((int *)parm->p_value) = my_obj->properties.default_preview_width;
+        break;
+    case MM_CAMERA_PARM_DEFAULT_PREVIEW_HEIGHT:
+        *((int *)parm->p_value) = my_obj->properties.default_preview_height;
+        break;
+    case MM_CAMERA_PARM_BESTSHOT_RECONFIGURE:
+        *((int *)parm->p_value) = my_obj->properties.bestshot_reconfigure;
+        break;
+    case MM_CAMERA_PARM_MAX_PREVIEW_SIZE: {
+        mm_camera_dimension_t *dim =
+            (mm_camera_dimension_t *)parm->p_value;
+        dim->height = my_obj->properties.max_preview_height;
+        dim->width = my_obj->properties.max_preview_width;
+        CDBG("%s: Max Preview Size: %d X %d\n", __func__,
+             dim->width, dim->height);
+    }
+        break;
+    case MM_CAMERA_PARM_MAX_VIDEO_SIZE: {
+        mm_camera_dimension_t *dim =
+            (mm_camera_dimension_t *)parm->p_value;
+        dim->height = my_obj->properties.max_video_height;
+        dim->width = my_obj->properties.max_video_width;
+        CDBG("%s: Max Video Size: %d X %d\n", __func__,
+             dim->width, dim->height);
+    }
+        break;
+    case MM_CAMERA_PARM_MAX_HFR_MODE:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_MAX_HFR_MODE,
+                    sizeof(camera_hfr_mode_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_FOCAL_LENGTH:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_FOCAL_LENGTH,
+                    sizeof(focus_distances_info_t), (void *)parm->p_value);
+    /* NOTE(review): the two view-angle queries reuse
+     * sizeof(focus_distances_info_t) as the payload size -- presumably
+     * the driver expects the same float payload; confirm against the
+     * kernel definition. */
+    case MM_CAMERA_PARM_HORIZONTAL_VIEW_ANGLE:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_HORIZONTAL_VIEW_ANGLE,
+                    sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_VERTICAL_VIEW_ANGLE:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_VERTICAL_VIEW_ANGLE,
+                    sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_FOCUS_DISTANCES:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_FOCUS_DISTANCES,
+                    sizeof(focus_distances_info_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_QUERY_FALSH4SNAP:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_QUERY_FLASH_FOR_SNAPSHOT,
+                    sizeof(int), (void *)parm->p_value);
+    case MM_CAMERA_PARM_3D_FRAME_FORMAT:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_3D_FRAME_FORMAT,
+                    sizeof(camera_3d_frame_t), (void *)parm->p_value);
+    case MM_CAMERA_PARM_MAXZOOM:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_MAXZOOM,
+                    sizeof(int), (void *)parm->p_value);
+    case MM_CAMERA_PARM_ZOOM_RATIO: {
+        mm_camera_zoom_tbl_t *tbl = (mm_camera_zoom_tbl_t *)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_ZOOMRATIOS,
+                    sizeof(int16_t)*tbl->size, tbl->zoom_ratio_tbl);
+    }
+    case MM_CAMERA_PARM_DEF_PREVIEW_SIZES: {
+        default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_DEF_PREVIEW_SIZES,
+                    sizeof(struct camera_size_type)*tbl->tbl_size, tbl->sizes_tbl);
+    }
+    case MM_CAMERA_PARM_DEF_VIDEO_SIZES: {
+        default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_DEF_VIDEO_SIZES,
+                    sizeof(struct camera_size_type)*tbl->tbl_size, tbl->sizes_tbl);
+    }
+    case MM_CAMERA_PARM_DEF_THUMB_SIZES: {
+        default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_DEF_THUMB_SIZES,
+                    sizeof(struct camera_size_type)*tbl->tbl_size, tbl->sizes_tbl);
+    }
+    case MM_CAMERA_PARM_DEF_HFR_SIZES:{
+        default_sizes_tbl_t *tbl = (default_sizes_tbl_t*)parm->p_value;
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_DEF_HFR_SIZES,
+                    sizeof(struct camera_size_type)*tbl->tbl_size, tbl->sizes_tbl);
+    }
+    case MM_CAMERA_PARM_OP_MODE:
+        *((mm_camera_op_mode_type_t *)parm->p_value) = my_obj->op_mode;
+        break;
+    case MM_CAMERA_PARM_SNAPSHOT_BURST_NUM:
+        *((int *)parm->p_value) = my_obj->snap_burst_num_by_user;
+        break;
+    case MM_CAMERA_PARM_VFE_OUTPUT_ENABLE:
+        *((int *)parm->p_value) = my_obj->properties.vfe_output_enable;
+        break;
+    case MM_CAMERA_PARM_LUX_IDX:
+        CDBG("%s: MM_CAMERA_PARM_LUX_IDX\n", __func__);
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_LUX_IDX,
+                    sizeof(int), (void *)parm->p_value);
+    case MM_CAMERA_PARM_MAX_NUM_FACES_DECT:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_MAX_NUM_FACES_DECT,
+                    sizeof(int), (void *)parm->p_value);
+    case MM_CAMERA_PARM_FACIAL_FEATURE_INFO:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_FACIAL_FEATURE_INFO,
+                    sizeof(int), (void *)parm->p_value);
+    case MM_CAMERA_PARM_HDR:
+        rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                 CAMERA_GET_PARM_HDR, sizeof(exp_bracketing_t), (void *)parm->p_value);
+        //my_obj->channel_interface_mask = *((exp_bracketing_t *)(parm->p_value));
+        break;
+    case MM_CAMERA_GET_PARM_LOW_LIGHT_FOR_ZSL:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_LOW_LIGHT_FOR_ZSL,
+                    sizeof(aec_info_for_flash_t), (void *)parm->p_value);
+
+    case MM_CAMERA_PARM_GET_AF_STATUS:
+        return mm_camera_send_native_ctrl_cmd(my_obj, CAMERA_GET_PARM_AF_STATUS,
+                  sizeof(af_actuator_status_t), (void *)parm->p_value);
+    default:
+        /* needs to add more implementation */
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
+/* Ask the kernel to allocate buffers for the channel described by |buf|
+ * (allocation only, no enqueue); thin wrapper over the channel state
+ * machine. */
+int32_t mm_camera_request_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf)
+{
+    return mm_camera_ch_fn(my_obj, buf->ch_type,
+                           MM_CAMERA_STATE_EVT_REQUEST_BUF,
+                           (void *)&buf->preview);
+}
+
+/* Enqueue (some of) the already-allocated buffers for |buf|'s channel
+ * to the kernel; thin wrapper over the channel state machine. */
+int32_t mm_camera_enqueue_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf)
+{
+    return mm_camera_ch_fn(my_obj, buf->ch_type,
+                           MM_CAMERA_STATE_EVT_ENQUEUE_BUF,
+                           (void *)&buf->preview);
+}
+
+/* Register |buf|'s buffers with the kernel and enqueue them all
+ * (REG_BUF = request + enqueue); thin wrapper over the channel state
+ * machine. */
+int32_t mm_camera_prepare_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf)
+{
+    return mm_camera_ch_fn(my_obj, buf->ch_type,
+                           MM_CAMERA_STATE_EVT_REG_BUF,
+                           (void *)&buf->preview);
+}
+/* Unregister the buffers of |ch_type| (stream must already be off).
+ * The per-channel mutex guards the state-machine transition. */
+int32_t mm_camera_unprepare_buf(mm_camera_obj_t * my_obj, mm_camera_channel_type_t ch_type)
+{
+    int32_t ret;
+
+    pthread_mutex_lock(&my_obj->ch[ch_type].mutex);
+    ret = mm_camera_ch_fn(my_obj, ch_type,
+                          MM_CAMERA_STATE_EVT_UNREG_BUF, NULL);
+    pthread_mutex_unlock(&my_obj->ch[ch_type].mutex);
+    return ret;
+}
+
+/* Subscribe to / unsubscribe from the kernel V4L2 app-notify events
+ * for |evt_type|, tracking registrations in my_obj->evt_type_mask.
+ *
+ * reg_count == 0 means "unregister": when this was the last registered
+ * event type, both NOTIFY and NOTIFY_ERROR events are unsubscribed and
+ * the event poll thread is torn down.  Any other reg_count means
+ * "register": the first registration subscribes both kernel events and
+ * launches the event poll thread. */
+static int mm_camera_evt_sub(mm_camera_obj_t * my_obj,
+                     mm_camera_event_type_t evt_type, int reg_count)
+{
+    int rc = MM_CAMERA_OK;
+    struct v4l2_event_subscription sub;
+
+    memset(&sub, 0, sizeof(sub));
+    sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_EVENT;
+    if(reg_count == 0) {
+        /* unsubscribe */
+        if(my_obj->evt_type_mask == (uint32_t)(1 << evt_type)) {
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+            CDBG("%s: unsubscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT;
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_UNSUBSCRIBE_EVENT, &sub);
+            CDBG("%s: unsubscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+        }
+        my_obj->evt_type_mask &= ~(1 << evt_type);
+        if(my_obj->evt_type_mask == 0) {
+            /* kill the polling thread when unreg the last event */
+            mm_camera_poll_thread_release(my_obj, MM_CAMERA_CH_MAX);
+        }
+    } else {
+        if(!my_obj->evt_type_mask) {
+            /* this is the first reg event */
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+            CDBG("%s: subscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            if (rc < 0)
+                goto end;
+            sub.type = V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT;
+            rc = ioctl(my_obj->ctrl_fd, VIDIOC_SUBSCRIBE_EVENT, &sub);
+            CDBG("%s: subscribe event 0x%x, rc = %d", __func__, sub.type, rc);
+            if (rc < 0)
+                goto end;
+        }
+        my_obj->evt_type_mask |= (1 << evt_type);
+        if(my_obj->evt_type_mask == (uint32_t)(1 << evt_type)) {
+            /* launch event polling when subscribe the first event */
+            rc = mm_camera_poll_thread_launch(my_obj, MM_CAMERA_CH_MAX);
+        }
+    }
+end:
+    return rc;
+}
+
+/* Register (evt_cb != NULL) or unregister (evt_cb == NULL, matched by
+ * user_data) an application callback for |evt_type|.  Keeps up to
+ * MM_CAMERA_EVT_ENTRY_MAX callbacks per event type and forwards the
+ * first/last registration to the kernel via mm_camera_evt_sub().
+ * Returns MM_CAMERA_OK on success, -1 when the table is full (reg) or
+ * no matching entry exists (unreg). */
+int mm_camera_reg_event(mm_camera_obj_t * my_obj, mm_camera_event_notify_t evt_cb,
+                           void *user_data, mm_camera_event_type_t evt_type)
+{
+    int i;
+    int rc = -1;
+    mm_camera_evt_obj_t *evt_array = &my_obj->evt[evt_type];
+    if(evt_cb) {
+        /* this is reg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == NULL) {
+                evt_array->evt[i].evt_cb = evt_cb;
+                evt_array->evt[i].user_data = user_data;
+                evt_array->reg_count++;
+                rc = MM_CAMERA_OK;
+                break;
+            }
+        }
+    } else {
+        /* this is unreg case */
+        for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+            if(evt_array->evt[i].user_data == user_data) {
+                evt_array->evt[i].evt_cb = NULL;
+                evt_array->evt[i].user_data = NULL;
+                evt_array->reg_count--;
+                rc = MM_CAMERA_OK;
+                break;
+            }
+        }
+    }
+    /* NOTE(review): the <= 1 test also fires when an unreg drops the
+     * count from 2 to 1, re-entering the subscribe path; harmless since
+     * evt_type_mask already has the bit set, but confirm intended. */
+    if(rc == MM_CAMERA_OK && evt_array->reg_count <= 1) {
+        /* subscribe/unsubscribe event to kernel */
+        rc = mm_camera_evt_sub(my_obj, evt_type, evt_array->reg_count);
+    }
+    return rc;
+}
+
+
+/* Post an AUTO_FOCUS_DONE control event with FAILED status to the app
+ * through the event poll thread; used when the auto-focus native
+ * command could not be issued at all.
+ * Fix vs. original: zero the event struct before filling it so the
+ * unused union bytes are not passed uninitialized to the listener. */
+static int32_t mm_camera_send_af_failed_event(mm_camera_obj_t *my_obj)
+{
+    int rc = 0;
+    mm_camera_event_t event;
+    memset(&event, 0, sizeof(event));
+    event.event_type = MM_CAMERA_EVT_TYPE_CTRL;
+    event.e.ctrl.evt= MM_CAMERA_CTRL_EVT_AUTO_FOCUS_DONE;
+    event.e.ctrl.status=CAM_CTRL_FAILED;
+    CDBG_HIGH("%s: Issuing call",__func__);
+    rc = mm_camera_poll_send_ch_event(my_obj, &event);
+    return rc;
+}
+
+/* Post a channel streaming-on/off event (|evt|) for channel |ch_type|
+ * to the app through the event poll thread.
+ * Fix vs. original: zero the event struct before filling it so the
+ * unused union bytes are not passed uninitialized to the listener. */
+static int32_t mm_camera_send_ch_on_off_event(mm_camera_obj_t *my_obj,
+                                       mm_camera_channel_type_t ch_type,
+                                       mm_camera_ch_event_type_t evt)
+{
+    int rc = 0;
+    mm_camera_event_t event;
+    memset(&event, 0, sizeof(event));
+    event.event_type = MM_CAMERA_EVT_TYPE_CH;
+    event.e.ch.evt = evt;
+    event.e.ch.ch = ch_type;
+    CDBG("%s: stream on event, type=0x%x, ch=%d, evt=%d",
+         __func__, event.event_type, event.e.ch.ch, event.e.ch.evt);
+    rc = mm_camera_poll_send_ch_event(my_obj, &event);
+    return rc;
+}
+
+/* Start the operation |opcode|.  FOCUS and GET_BUFFERED_FRAME are
+ * handled directly; all other opcodes are mapped to a channel and
+ * started via the channel state machine, gated on the current op_mode
+ * (ZSL/CAPTURE vs. VIDEO accept different opcodes).  Unless suppressed
+ * (PREPARE_SNAPSHOT), a STREAMING_ON channel event is posted to the
+ * app afterwards. */
+int32_t mm_camera_action_start(mm_camera_obj_t *my_obj,
+    mm_camera_ops_type_t opcode, void *parm)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    int send_on_off_evt = 1;
+    mm_camera_channel_type_t ch_type;
+    switch(opcode) {
+    case MM_CAMERA_OPS_FOCUS: {
+        if(!parm) return rc;
+        /* AF start failure is reported to the app as an AF_DONE/FAILED
+         * event rather than as an error return. */
+        if(0 > mm_camera_send_native_ctrl_cmd(my_obj,
+                   CAMERA_SET_PARM_AUTO_FOCUS,
+                   sizeof(isp3a_af_mode_t), parm))
+            mm_camera_send_af_failed_event(my_obj);
+        return MM_CAMERA_OK;
+    }
+    case MM_CAMERA_OPS_GET_BUFFERED_FRAME: {
+        mm_camera_ops_parm_get_buffered_frame_t *tmp =
+            (mm_camera_ops_parm_get_buffered_frame_t *)parm;
+        rc = mm_camera_ch_fn(my_obj, tmp->ch_type,
+                 MM_CAMERA_STATE_EVT_DISPATCH_BUFFERED_FRAME, NULL);
+        return rc;
+    }
+    default:
+        break;
+    }
+    ch_type = mm_camera_util_opcode_2_ch_type(my_obj, opcode);
+    CDBG("%s:ch=%d,op_mode=%d,opcode=%d\n",
+         __func__,ch_type,my_obj->op_mode,opcode);
+    switch(my_obj->op_mode) {
+    case MM_CAMERA_OP_MODE_ZSL:
+    case MM_CAMERA_OP_MODE_CAPTURE:
+        switch(opcode) {
+        case MM_CAMERA_OPS_PREVIEW:
+        case MM_CAMERA_OPS_SNAPSHOT:
+        case MM_CAMERA_OPS_ZSL:
+        case MM_CAMERA_OPS_RAW:
+            rc = mm_camera_ch_fn(my_obj, ch_type,
+                    MM_CAMERA_STATE_EVT_STREAM_ON, NULL);
+            break;
+        default:
+            break;
+        }
+        break;
+    case MM_CAMERA_OP_MODE_VIDEO:
+        switch(opcode) {
+        case MM_CAMERA_OPS_PREVIEW:
+        case MM_CAMERA_OPS_VIDEO:
+        case MM_CAMERA_OPS_SNAPSHOT:
+            rc = mm_camera_ch_fn(my_obj, ch_type,
+                    MM_CAMERA_STATE_EVT_STREAM_ON, NULL);
+            CDBG("%s: op_mode=%d, ch %d, rc=%d\n",
+                 __func__, MM_CAMERA_OP_MODE_VIDEO, ch_type ,rc);
+            break;
+        case MM_CAMERA_OPS_PREPARE_SNAPSHOT:
+            /* No stream started here, so suppress the STREAMING_ON
+             * event; prepare-snapshot may take longer than the default
+             * native timeout, hence the explicit 2000 ms. */
+            send_on_off_evt = 0;
+            rc = mm_camera_send_native_ctrl_timeout_cmd(my_obj,CAMERA_PREPARE_SNAPSHOT, 0, NULL, 2000);
+            CDBG("%s: prepare snapshot done opcode = %d, rc= %d\n", __func__, opcode, rc);
+            break;
+        default:
+            break;
+        }
+        break;
+    default:
+        break;
+    }
+    CDBG("%s: ch=%d,op_mode=%d,opcode=%d\n", __func__, ch_type,
+         my_obj->op_mode, opcode);
+    if(send_on_off_evt)
+        rc = mm_camera_send_ch_on_off_event(my_obj,ch_type,MM_CAMERA_CH_EVT_STREAMING_ON);
+    return rc;
+}
+
+/* Stop the operation |opcode|.  FOCUS maps to an AF-cancel native
+ * command; other opcodes are mapped to a channel and stopped via the
+ * channel state machine, gated on the current op_mode.  A STREAMING_OFF
+ * channel event is always posted to the app afterwards. */
+int32_t mm_camera_action_stop(mm_camera_obj_t *my_obj,
+    mm_camera_ops_type_t opcode, void *parm)
+{
+    int32_t rc = -MM_CAMERA_E_GENERAL;
+    mm_camera_channel_type_t ch_type;
+
+    if(opcode == MM_CAMERA_OPS_FOCUS) {
+        return mm_camera_send_native_ctrl_cmd(my_obj,
+                                              CAMERA_AUTO_FOCUS_CANCEL, 0, NULL);
+    }
+
+    ch_type = mm_camera_util_opcode_2_ch_type(my_obj, opcode);
+    switch(my_obj->op_mode) {
+    case MM_CAMERA_OP_MODE_ZSL:
+    case MM_CAMERA_OP_MODE_CAPTURE:
+        switch(opcode) {
+        case MM_CAMERA_OPS_PREVIEW:
+        case MM_CAMERA_OPS_SNAPSHOT:
+        case MM_CAMERA_OPS_ZSL:
+        case MM_CAMERA_OPS_RAW:
+            rc = mm_camera_ch_fn(my_obj, ch_type,
+                    MM_CAMERA_STATE_EVT_STREAM_OFF, NULL);
+            CDBG("%s:CAPTURE mode STREAMOFF rc=%d\n",__func__, rc);
+            break;
+        default:
+            break;
+        }
+        break;
+    case MM_CAMERA_OP_MODE_VIDEO:
+        switch(opcode) {
+        case MM_CAMERA_OPS_PREVIEW:
+        case MM_CAMERA_OPS_VIDEO:
+        case MM_CAMERA_OPS_SNAPSHOT:
+            rc = mm_camera_ch_fn(my_obj , ch_type,
+                    MM_CAMERA_STATE_EVT_STREAM_OFF, NULL);
+            CDBG("%s:VIDEO mode STREAMOFF rc=%d\n",__func__, rc);
+            break;
+        default:
+            break;
+        }
+        break;
+    default:
+        break;
+    }
+    CDBG("%s:ch=%d\n",__func__, ch_type);
+    rc = mm_camera_send_ch_on_off_event(my_obj,ch_type,MM_CAMERA_CH_EVT_STREAMING_OFF);
+    return rc;
+}
+
+/* Configure how many per-stream on/off acks each channel needs before
+ * it counts as fully started/stopped: the snapshot channel drives two
+ * streams (main + thumbnail), every other channel drives one. */
+static void mm_camera_init_ch_stream_count(mm_camera_obj_t *my_obj)
+{
+    int ch;
+
+    for(ch = 0; ch < MM_CAMERA_CH_MAX; ch++) {
+        int cnt = (ch == MM_CAMERA_CH_SNAPSHOT) ? 2 : 1;
+        my_obj->ch_stream_count[ch].stream_on_count_cfg = cnt;
+        my_obj->ch_stream_count[ch].stream_off_count_cfg = cnt;
+    }
+}
+
+/* Open the camera device and bring the object to a usable state:
+ *  1. open /dev/<name> (retrying on EIO up to MM_CAMERA_DEV_OPEN_TRIES),
+ *  2. open the per-camera domain socket,
+ *  3. bind the control fd as the mmap instance and set the op mode,
+ *  4. query driver capabilities into my_obj->properties,
+ *  5. init per-channel stream counts and launch all poll threads.
+ * Any failure unwinds the fds opened so far and returns a negative
+ * error; on success my_obj->op_mode is set and MM_CAMERA_OK returned. */
+int32_t mm_camera_open(mm_camera_obj_t *my_obj,
+                   mm_camera_op_mode_type_t op_mode)
+{
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+    int32_t rc = MM_CAMERA_OK;
+    int8_t n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    uint8_t sleep_msec=MM_CAMERA_DEV_OPEN_RETRY_SLEEP;
+    uint8_t i;
+
+    CDBG("%s: begin\n", __func__);
+
+    if(my_obj->op_mode != MM_CAMERA_OP_MODE_NOTUSED) {
+        CDBG("%s: not allowed in existing op mode %d\n",
+             __func__, my_obj->op_mode);
+        return -MM_CAMERA_E_INVALID_OPERATION;
+    }
+    if(op_mode >= MM_CAMERA_OP_MODE_MAX) {
+        CDBG("%s: invalid input %d\n",
+             __func__, op_mode);
+        return -MM_CAMERA_E_INVALID_INPUT;
+    }
+    snprintf(dev_name, sizeof(dev_name), "/dev/%s", mm_camera_util_get_dev_name(my_obj));
+    //rc = mm_camera_dev_open(&my_obj->ctrl_fd, dev_name);
+    CDBG("%s: mm_camera_dev_open rc = %d\n", __func__, rc);
+
+    /* Retry the open only on EIO -- the driver can transiently refuse
+     * while it is still settling after a previous close. */
+    do{
+        n_try--;
+        my_obj->ctrl_fd = open(dev_name,O_RDWR | O_NONBLOCK);
+        ALOGV("%s: ctrl_fd = %d", __func__, my_obj->ctrl_fd);
+        ALOGV("Errno:%d",errno);
+        if((my_obj->ctrl_fd > 0) || (errno != EIO) || (n_try <= 0 )) {
+            ALOGV("%s: opened, break out while loop", __func__);
+
+            break;
+        }
+        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__,sleep_msec);
+        usleep(sleep_msec*1000);
+    }while(n_try>0);
+
+    ALOGV("%s: after while loop", __func__);
+    if (my_obj->ctrl_fd <= 0) {
+        CDBG("%s: cannot open control fd of '%s' Errno = %d\n",
+             __func__, mm_camera_util_get_dev_name(my_obj),errno);
+        return -MM_CAMERA_E_GENERAL;
+    }
+    ALOGV("%s: 2\n", __func__);
+
+    /* open domain socket*/
+    n_try=MM_CAMERA_DEV_OPEN_TRIES;
+    do{
+        n_try--;
+        my_obj->ds_fd = mm_camera_socket_create(my_obj->my_id, MM_CAMERA_SOCK_TYPE_UDP); // TODO: UDP for now, change to TCP
+        ALOGV("%s: ds_fd = %d", __func__, my_obj->ds_fd);
+        ALOGV("Errno:%d",errno);
+        if((my_obj->ds_fd > 0) || (n_try <= 0 )) {
+            ALOGV("%s: opened, break out while loop", __func__);
+            break;
+        }
+        CDBG("%s:failed with I/O error retrying after %d milli-seconds",
+             __func__,sleep_msec);
+        usleep(sleep_msec*1000);
+    }while(n_try>0);
+
+    ALOGV("%s: after while loop for domain socket open", __func__);
+    if (my_obj->ds_fd <= 0) {
+        CDBG_ERROR("%s: cannot open domain socket fd of '%s' Errno = %d\n",
+                   __func__, mm_camera_util_get_dev_name(my_obj),errno);
+        close(my_obj->ctrl_fd);
+        my_obj->ctrl_fd = -1;
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    /* set ctrl_fd to be the mem_mapping fd */
+    rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                         MSM_V4L2_PID_MMAP_INST, 0);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("error: ioctl VIDIOC_S_CTRL MSM_V4L2_PID_MMAP_INST failed: %s\n",
+                   strerror(errno));
+        close(my_obj->ctrl_fd);
+        close(my_obj->ds_fd);
+        my_obj->ctrl_fd = -1;
+        my_obj->ds_fd = -1;
+        return -MM_CAMERA_E_GENERAL;
+    }
+    /* NOTE(review): a failure of the CAM_MODE s_ctrl below is not an
+     * early return -- op_mode simply stays NOTUSED and the open
+     * continues; confirm that is intended. */
+    if(op_mode != MM_CAMERA_OP_MODE_NOTUSED)
+        rc =  mm_camera_util_s_ctrl(my_obj->ctrl_fd,
+                            MSM_V4L2_PID_CAM_MODE, op_mode);
+    if(!rc) {
+        my_obj->op_mode = op_mode;
+        my_obj->current_mode = CAMERA_MODE_2D; /* set geo mode to 2D by default */
+    }
+
+    /* get camera capabilities */
+    memset(&my_obj->properties, 0, sizeof(cam_prop_t));
+    rc = mm_camera_send_native_ctrl_cmd(my_obj,
+                                        CAMERA_GET_CAPABILITIES,
+                                        sizeof(cam_prop_t),
+                                        (void *)& my_obj->properties);
+    if (rc != MM_CAMERA_OK) {
+        CDBG_ERROR("%s: cannot get camera capabilities\n", __func__);
+        close(my_obj->ctrl_fd);
+        close(my_obj->ds_fd);
+        my_obj->ctrl_fd = -1;
+        my_obj->ds_fd = -1;
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    mm_camera_poll_threads_init(my_obj);
+    mm_camera_init_ch_stream_count(my_obj);
+    CDBG("%s : Launch Threads in Cam Open",__func__);
+    for(i = 0; i < MM_CAMERA_CH_MAX; i++) {
+        mm_camera_poll_thread_launch(my_obj,(mm_camera_channel_type_t)i);
+    }
+    CDBG("%s: '%s', ctrl_fd=%d,op_mode=%d,rc=%d\n",
+         __func__, dev_name, my_obj->ctrl_fd, my_obj->op_mode, rc);
+    return rc;
+}
+
+/* Close the camera: release every channel, tear down all poll threads,
+ * reset op_mode, then close the control fd and the domain socket.
+ * Always returns MM_CAMERA_OK (close failures are only logged). */
+int32_t mm_camera_close(mm_camera_obj_t *my_obj)
+{
+    int i, rc = 0;
+
+    for(i = 0; i < MM_CAMERA_CH_MAX; i++){
+        mm_camera_ch_fn(my_obj, (mm_camera_channel_type_t)i,
+                        MM_CAMERA_STATE_EVT_RELEASE, NULL);
+    }
+
+    CDBG("%s : Close Threads in Cam Close",__func__);
+    for(i = 0; i < MM_CAMERA_CH_MAX; i++) {
+        mm_camera_poll_thread_release(my_obj,(mm_camera_channel_type_t)i);
+    }
+    mm_camera_poll_threads_deinit(my_obj);
+    my_obj->op_mode = MM_CAMERA_OP_MODE_NOTUSED;
+    /* NOTE(review): fds are reset to 0 here but to -1 on the open()
+     * error paths -- the "> 0" guards tolerate both, yet one sentinel
+     * would be cleaner; confirm nothing treats fd 0 as valid. */
+    if(my_obj->ctrl_fd > 0) {
+        rc = close(my_obj->ctrl_fd);
+        if(rc < 0) {
+            /* this is a dead end. */
+            CDBG("%s: !!!!FATAL ERROR!!!! ctrl_fd = %d, rc = %d",
+                 __func__, my_obj->ctrl_fd, rc);
+        }
+        my_obj->ctrl_fd = 0;
+    }
+    if(my_obj->ds_fd > 0) {
+        mm_camera_socket_close(my_obj->ds_fd);
+        my_obj->ds_fd = 0;
+    }
+    return MM_CAMERA_OK;
+}
+
+/* Start (|start| non-zero) or stop the operation |opcode|, delegating
+ * to mm_camera_action_start()/mm_camera_action_stop(). */
+int32_t mm_camera_action(mm_camera_obj_t *my_obj, uint8_t start,
+    mm_camera_ops_type_t opcode, void *parm)
+{
+    int32_t rc;
+
+    if(start)
+        rc = mm_camera_action_start(my_obj, opcode, parm);
+    else
+        rc = mm_camera_action_stop(my_obj, opcode, parm);
+    CDBG("%s:start_flag=%d,opcode=%d,parm=%p,rc=%d\n",__func__,start,opcode,parm, rc);
+    return rc;
+}
+
+/* Acquire channel |ch_type| (opens its stream fds) via the channel
+ * state machine. */
+int32_t mm_camera_ch_acquire(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type)
+{
+    return mm_camera_ch_fn(my_obj,ch_type, MM_CAMERA_STATE_EVT_ACQUIRE, 0);
+}
+
+/* Release channel |ch_type| via the channel state machine; any error
+ * from the state machine is discarded. */
+void mm_camera_ch_release(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type)
+{
+    mm_camera_ch_fn(my_obj,ch_type, MM_CAMERA_STATE_EVT_RELEASE, 0);
+}
+
+/* Send |msg| (|buf_size| bytes), optionally with file descriptor
+ * |sendfd| attached, over this camera's domain socket. */
+int32_t mm_camera_sendmsg(mm_camera_obj_t *my_obj, void *msg, uint32_t buf_size, int sendfd)
+{
+    return mm_camera_socket_sendmsg(my_obj->ds_fd, msg, buf_size, sendfd);
+}
diff --git a/camera/mm-camera-interface/mm_camera.h b/camera/mm-camera-interface/mm_camera.h
new file mode 100644
index 0000000..0d62fbe
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera.h
@@ -0,0 +1,355 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_H__
+#define __MM_CAMERA_H__
+
+#define TRUE 1
+#define FALSE 0
+
+typedef enum {
+ MM_CAMERA_STREAM_STATE_NOTUSED, /* not used */
+ MM_CAMERA_STREAM_STATE_ACQUIRED, /* acquired, fd opened */
+ MM_CAMERA_STREAM_STATE_CFG, /* fmt & dim configured */
+ MM_CAMERA_STREAM_STATE_REG, /* buf regged, stream off */
+ MM_CAMERA_STREAM_STATE_ACTIVE, /* stream on */
+ MM_CAMERA_STREAM_STATE_MAX
+} mm_camera_stream_state_type_t;
+
+typedef enum {
+ MM_CAMERA_STATE_EVT_NOTUSED,
+ MM_CAMERA_STATE_EVT_ACQUIRE,
+ MM_CAMERA_STATE_EVT_ATTR,
+ MM_CAMERA_STATE_EVT_RELEASE,
+ MM_CAMERA_STATE_EVT_REG_BUF_CB,
+ MM_CAMERA_STATE_EVT_SET_FMT,
+ MM_CAMERA_STATE_EVT_SET_DIM,
+ MM_CAMERA_STATE_EVT_REG_BUF, // request amount of buffers and enqueue all buffers to kernel
+ MM_CAMERA_STATE_EVT_UNREG_BUF,
+ MM_CAMERA_STATE_EVT_STREAM_ON,
+ MM_CAMERA_STATE_EVT_STREAM_OFF,
+ MM_CAMERA_STATE_EVT_QBUF,
+ MM_CAMERA_STATE_EVT_GET_CROP,
+ MM_CAMERA_STATE_EVT_DISPATCH_BUFFERED_FRAME,
+ MM_CAMERA_STATE_EVT_REQUEST_BUF, // request amount of buffers to kernel only
+ MM_CAMERA_STATE_EVT_ENQUEUE_BUF, // enqueue some of buffers to kernel only
+ MM_CAMERA_STATE_EVT_MAX
+} mm_camera_state_evt_type_t;
+
+typedef struct {
+ mm_camera_event_notify_t evt_cb;
+ void * user_data;
+} mm_camera_notify_cb_t;
+
+typedef enum {
+ MM_CAMERA_BUF_CB_ONCE,
+ MM_CAMERA_BUF_CB_COUNT,
+ MM_CAMERA_BUF_CB_INFINITE
+} mm_camera_buf_cb_type_t;
+
+typedef struct {
+ mm_camera_buf_notify_t cb;
+ mm_camera_buf_cb_type_t cb_type;
+ uint32_t cb_count;
+ void *user_data;
+} mm_camera_buf_cb_t;
+
+typedef enum {
+ MM_CAMERA_STREAM_PIPE,
+ MM_CAMERA_STREAM_PREVIEW,
+ MM_CAMERA_STREAM_VIDEO,
+ MM_CAMERA_STREAM_SNAPSHOT,
+ MM_CAMERA_STREAM_THUMBNAIL,
+ MM_CAMERA_STREAM_RAW,
+ MM_CAMERA_STREAM_VIDEO_MAIN,
+ MM_CAMERA_STREAM_MAX
+} mm_camera_stream_type_t;
+
+typedef struct mm_camera_frame_t mm_camera_frame_t;
+struct mm_camera_frame_t{
+ struct msm_frame frame;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+ uint8_t num_planes;
+ int idx;
+ int match;
+ int valid_entry;
+ mm_camera_frame_t *next;
+};
+
+typedef struct {
+ pthread_mutex_t mutex;
+ int cnt;
+ int match_cnt;
+ mm_camera_frame_t *head;
+ mm_camera_frame_t *tail;
+} mm_camera_frame_queue_t;
+
+typedef struct {
+ mm_camera_frame_queue_t readyq;
+ int32_t num_frame;
+ uint32_t frame_len;
+ int8_t reg_flag;
+ uint32_t frame_offset[MM_CAMERA_MAX_NUM_FRAMES];
+ mm_camera_frame_t frame[MM_CAMERA_MAX_NUM_FRAMES];
+ int8_t ref_count[MM_CAMERA_MAX_NUM_FRAMES];
+ int32_t use_multi_fd;
+ int qbuf;
+ pthread_mutex_t mutex;
+} mm_camera_stream_frame_t;
+
+typedef struct {
+ int32_t fd;
+ mm_camera_stream_state_type_t state;
+ mm_camera_stream_type_t stream_type;
+ struct v4l2_format fmt;
+ cam_format_t cam_fmt;
+ mm_camera_stream_frame_t frame;
+} mm_camera_stream_t;
+
+typedef struct {
+ mm_camera_stream_t stream;
+ mm_camera_raw_streaming_type_t mode;
+} mm_camera_ch_raw_t;
+
+typedef struct {
+ mm_camera_stream_t stream;
+} mm_camera_ch_preview_t;
+
+typedef struct {
+ mm_camera_stream_t thumbnail;
+ mm_camera_stream_t main;
+ int delivered_cnt;
+ int8_t pending_cnt;
+ uint32_t expected_matching_id;
+} mm_camera_ch_snapshot_t;
+
+typedef struct {
+ int8_t fifo[MM_CAMERA_MAX_FRAME_NUM];
+ int8_t low;
+ int8_t high;
+ int8_t len;
+ int8_t water_mark;
+} mm_camera_circule_fifo_t;
+
+typedef struct {
+ mm_camera_stream_t video;
+ mm_camera_stream_t main;
+ uint8_t has_main;
+} mm_camera_ch_video_t;
+
+#define MM_CAMERA_BUF_CB_MAX 4
+typedef struct {
+ mm_camera_channel_type_t type;
+ pthread_mutex_t mutex;
+ uint8_t acquired;
+ mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+ mm_camera_channel_attr_buffering_frame_t buffering_frame;
+ union {
+ mm_camera_ch_raw_t raw;
+ mm_camera_ch_preview_t preview;
+ mm_camera_ch_snapshot_t snapshot;
+ mm_camera_ch_video_t video;
+ };
+} mm_camera_ch_t;
+
+#define MM_CAMERA_EVT_ENTRY_MAX 4
+typedef struct {
+ mm_camera_event_notify_t evt_cb;
+ void *user_data;
+} mm_camera_evt_entry_t;
+
+typedef struct {
+ mm_camera_evt_entry_t evt[MM_CAMERA_EVT_ENTRY_MAX];
+ int reg_count;
+} mm_camera_evt_obj_t;
+
+#define MM_CAMERA_CH_STREAM_MAX 2
+typedef enum {
+ MM_CAMERA_POLL_TYPE_EVT,
+ MM_CAMERA_POLL_TYPE_CH,
+ MM_CAMERA_POLL_TYPE_MAX
+} mm_camera_poll_thread_type_t;
+
/* State owned by one poll thread: the self-pipe used to wake/command the
 * loop, the fds being polled, and the streams those fds belong to. */
typedef struct {
 mm_camera_poll_thread_type_t poll_type;
 int32_t pfds[2];                       /* self-pipe: [0] polled, [1] written to signal the loop */
 int poll_fd[MM_CAMERA_CH_STREAM_MAX+1];
 int num_fds;                           /* live entries in poll_fd[] */
 int used;
 pthread_t pid;
 int32_t state;
 int timeoutms;                         /* poll timeout in milliseconds */
 void *my_obj;                          /* back-pointer to the owning mm_camera_obj_t */
 mm_camera_channel_type_t ch_type;
 mm_camera_stream_t *poll_streams[MM_CAMERA_CH_STREAM_MAX];
 uint32_t cmd;                          /* last command delivered through the pipe */
} mm_camera_poll_thread_data_t;

/* Poll thread handle; mutex/cond pair synchronizes launch and teardown. */
typedef struct {
 pthread_mutex_t mutex;
 pthread_cond_t cond_v;
 int32_t status;
 mm_camera_poll_thread_data_t data;
} mm_camera_poll_thread_t;

/* Configured vs. observed stream on/off counts for one channel. */
typedef struct {
 int stream_on_count_cfg;
 int stream_off_count_cfg;
 int stream_on_count;
 int stream_off_count;
} mm_camera_ch_stream_count_t;
/* One poll thread per channel plus one for events.
 * NOTE(review): "THRAED" is a typo baked into the public macro name. */
#define MM_CAMERA_POLL_THRAED_MAX (MM_CAMERA_CH_MAX+1)

/* One kernel memory mapping: the msm cookie plus its user-space vaddr. */
typedef struct {
 struct msm_mem_map_info cookie;
 uint32_t vaddr;
} mm_camera_mem_map_entry_t;

#define MM_CAMERA_MEM_MAP_MAX 8
/* Bounded table of active memory mappings (used for histogram buffers). */
typedef struct {
 int num;
 mm_camera_mem_map_entry_t entry[MM_CAMERA_MEM_MAP_MAX];
} mm_camera_mem_map_t;
+
/* Top-level per-camera object shared by all channels and poll threads. */
typedef struct {
 int8_t my_id;
 camera_mode_t current_mode;
 mm_camera_op_mode_type_t op_mode;      /* e.g. ZSL vs. normal capture (consulted in mm_camera_ch_util_qbuf) */
 mm_camera_notify_cb_t *notify;
 mm_camera_ch_t ch[MM_CAMERA_CH_MAX];
 int ref_count;
 uint32_t ch_streaming_mask;            /* bitmask of channels currently streaming */
 int32_t ctrl_fd;                       /* control fd used for s_ctrl/g_ctrl ioctls */
 int32_t ds_fd; // domain socket fd
 cam_ctrl_dimension_t dim;
 cam_prop_t properties;
 pthread_mutex_t mutex;                 /* guards object-wide state */
 mm_camera_evt_obj_t evt[MM_CAMERA_EVT_TYPE_MAX];
 mm_camera_ch_stream_count_t ch_stream_count[MM_CAMERA_CH_MAX];
 uint32_t evt_type_mask;
 mm_camera_poll_thread_t poll_threads[MM_CAMERA_POLL_THRAED_MAX];
 mm_camera_mem_map_t hist_mem_map;      /* histogram buffer mappings */
 int full_liveshot;                     /* nonzero: snapshot channel uses main stream only, no thumbnail */
 int snap_burst_num_by_user;            /* burst count requested by the client */
} mm_camera_obj_t;

#define MM_CAMERA_DEV_NAME_LEN 32
#define MM_CAMERA_DEV_OPEN_TRIES 2        /* presumably consumed by mm_camera_dev_open -- verify */
#define MM_CAMERA_DEV_OPEN_RETRY_SLEEP 20

/* Global table of discovered sensors and their open camera objects. */
typedef struct {
 mm_camera_t camera[MSM_MAX_CAMERA_SENSORS];
 int8_t num_cam;
 char video_dev_name[MSM_MAX_CAMERA_SENSORS][MM_CAMERA_DEV_NAME_LEN];
 mm_camera_obj_t *cam_obj[MSM_MAX_CAMERA_SENSORS];
} mm_camera_ctrl_t;

/* Generic get/set parameter descriptor: a type tag plus an untyped value
 * whose actual layout depends on parm_type. */
typedef struct {
 mm_camera_parm_type_t parm_type;
 void *p_value;
} mm_camera_parm_t;
+
/* ---- Internal API shared across the mm-camera interface modules ---- */

/* Stream finite-state-machine entry point: drives 'stream' through 'evt'. */
extern int32_t mm_camera_stream_fsm_fn_vtbl (mm_camera_obj_t * my_obj,
 mm_camera_stream_t *stream,
 mm_camera_state_evt_type_t evt, void *val);
extern const char *mm_camera_util_get_dev_name(mm_camera_obj_t * my_obj);
/* V4L2 set/get control wrappers over a control fd. */
extern int32_t mm_camera_util_s_ctrl( int32_t fd,
 uint32_t id, int32_t value);
extern int32_t mm_camera_util_private_s_ctrl( int32_t fd,
 uint32_t id, int32_t value);
extern int32_t mm_camera_util_g_ctrl( int32_t fd,
 uint32_t id, int32_t *value);
/* Channel finite-state-machine entry point (see mm_camera_channel.c). */
extern int32_t mm_camera_ch_fn(mm_camera_obj_t * my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_state_evt_type_t evt, void *val);
extern int32_t mm_camera_action(mm_camera_obj_t *my_obj, uint8_t start,
 mm_camera_ops_type_t opcode, void *parm);
extern int32_t mm_camera_open(mm_camera_obj_t *my_obj,
 mm_camera_op_mode_type_t op_mode);
extern int32_t mm_camera_close(mm_camera_obj_t *my_obj);
extern int32_t mm_camera_start(mm_camera_obj_t *my_obj,
 mm_camera_ops_type_t opcode, void *parm);
extern int32_t mm_camera_stop(mm_camera_obj_t *my_obj,
 mm_camera_ops_type_t opcode, void *parm);
extern int32_t mm_camera_get_parm(mm_camera_obj_t * my_obj,
 mm_camera_parm_t *parm);
extern int32_t mm_camera_set_parm(mm_camera_obj_t * my_obj,
 mm_camera_parm_t *parm);
/* Buffer registration / enqueue paths. */
extern int32_t mm_camera_request_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf);
extern int32_t mm_camera_enqueue_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf);
extern int32_t mm_camera_prepare_buf(mm_camera_obj_t * my_obj, mm_camera_reg_buf_t *buf);
extern int32_t mm_camera_unprepare_buf(mm_camera_obj_t * my_obj, mm_camera_channel_type_t ch_type);
extern int mm_camera_poll_thread_launch(mm_camera_obj_t * my_obj, int ch_type);

int mm_camera_poll_thread_del_ch(mm_camera_obj_t * my_obj, int ch_type);
int mm_camera_poll_thread_add_ch(mm_camera_obj_t * my_obj, int ch_type);
extern int32_t mm_camera_poll_dispatch_buffered_frames(mm_camera_obj_t * my_obj, int ch_type);
extern int mm_camera_poll_thread_release(mm_camera_obj_t * my_obj, int ch_type);
extern void mm_camera_poll_threads_init(mm_camera_obj_t * my_obj);
extern void mm_camera_poll_threads_deinit(mm_camera_obj_t * my_obj);
extern int mm_camera_poll_busy(mm_camera_obj_t * my_obj);
extern void mm_camera_msm_data_notify(mm_camera_obj_t * my_obj, int fd,
 mm_camera_stream_type_t stream_type);
extern void mm_camera_msm_evt_notify(mm_camera_obj_t * my_obj, int fd);
extern int mm_camera_read_msm_frame(mm_camera_obj_t * my_obj,
 mm_camera_stream_t *stream);
extern int32_t mm_camera_ch_acquire(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type);
extern void mm_camera_ch_release(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type);
extern int mm_camera_ch_is_active(mm_camera_obj_t * my_obj, mm_camera_channel_type_t ch_type);
extern void mm_camera_ch_util_get_stream_objs(mm_camera_obj_t * my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_stream_t **stream1,
 mm_camera_stream_t **stream2);
extern int mm_camera_stream_qbuf(mm_camera_obj_t * my_obj,
 mm_camera_stream_t *stream,
 int idx);
/* Frame-queue primitives; *_no_lock variants assume the caller holds the lock. */
extern int mm_camera_stream_frame_get_q_cnt(mm_camera_frame_queue_t *q);
extern mm_camera_frame_t *mm_camera_stream_frame_deq(mm_camera_frame_queue_t *q);
extern mm_camera_frame_t *mm_camera_stream_frame_deq_no_lock(mm_camera_frame_queue_t *q);
extern void mm_camera_stream_frame_enq(mm_camera_frame_queue_t *q, mm_camera_frame_t *node);
extern void mm_camera_stream_frame_enq_no_lock(mm_camera_frame_queue_t *q, mm_camera_frame_t *node);
extern void mm_camera_stream_frame_refill_q(mm_camera_frame_queue_t *q, mm_camera_frame_t *node, int num);
extern int mm_camera_stream_is_active(mm_camera_stream_t *stream);
extern int32_t mm_camera_stream_util_buf_done(mm_camera_obj_t * my_obj,
 mm_camera_stream_t *stream,
 mm_camera_notify_frame_t *frame);
//extern int mm_camera_poll_add_stream(mm_camera_obj_t * my_obj, mm_camera_stream_t *stream);
//extern int mm_camera_poll_del_stream(mm_camera_obj_t * my_obj, mm_camera_stream_t *stream);
extern int mm_camera_dev_open(int *fd, char *dev_name);
extern int mm_camera_reg_event(mm_camera_obj_t * my_obj, mm_camera_event_notify_t evt_cb,
 void *user_data, uint32_t evt_type);
extern int mm_camera_poll_send_ch_event(mm_camera_obj_t * my_obj, mm_camera_event_t *event);
extern void mm_camera_msm_proc_ch_event(mm_camera_obj_t *my_obj, mm_camera_event_t *event);
extern void mm_camera_dispatch_app_event(mm_camera_obj_t *my_obj, mm_camera_event_t *event);
extern void mm_camera_dispatch_buffered_frames(mm_camera_obj_t *my_obj, mm_camera_channel_type_t ch_type);
extern void mm_camera_check_pending_zsl_frames(mm_camera_obj_t *my_obj,
 mm_camera_channel_type_t ch_type);
extern int mm_camera_ch_util_get_num_stream(mm_camera_obj_t * my_obj,mm_camera_channel_type_t ch_type);
extern int32_t mm_camera_sendmsg(mm_camera_obj_t *my_obj, void *msg, uint32_t buf_size, int sendfd);
#endif /* __MM_CAMERA_H__ */
diff --git a/camera/mm-camera-interface/mm_camera_channel.c b/camera/mm-camera-interface/mm_camera_channel.c
new file mode 100644
index 0000000..6e01c8d
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_channel.c
@@ -0,0 +1,767 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <linux/msm_ion.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+#if 0
+#undef CDBG
+#define CDBG ALOGV
+#endif
+/* static functions prototype declarations */
+static int mm_camera_channel_skip_frames(mm_camera_obj_t *my_obj,
+ mm_camera_frame_queue_t *mq,
+ mm_camera_frame_queue_t *sq,
+ mm_camera_stream_t *mstream,
+ mm_camera_stream_t *sstream,
+ mm_camera_channel_attr_buffering_frame_t *frame_attr);
+static int mm_camera_channel_get_starting_frame(mm_camera_obj_t *my_obj,
+ mm_camera_ch_t *ch,
+ mm_camera_stream_t *mstream,
+ mm_camera_stream_t *sstream,
+ mm_camera_frame_queue_t *mq,
+ mm_camera_frame_queue_t *sq,
+ mm_camera_frame_t **mframe,
+ mm_camera_frame_t **sframe);
+static int mm_camera_ch_search_frame_based_on_time(mm_camera_obj_t *my_obj,
+ mm_camera_ch_t *ch,
+ mm_camera_stream_t *mstream,
+ mm_camera_stream_t *sstream,
+ mm_camera_frame_queue_t *mq,
+ mm_camera_frame_queue_t *sq,
+ mm_camera_frame_t **mframe,
+ mm_camera_frame_t **sframe);
+
+
+
+int mm_camera_ch_util_get_num_stream(mm_camera_obj_t * my_obj,mm_camera_channel_type_t ch_type)
+{
+ int num = 0;
+ switch(ch_type) {
+ case MM_CAMERA_CH_RAW:
+ num = 1;
+ break;
+ case MM_CAMERA_CH_PREVIEW:
+ num = 1;
+ break;
+ case MM_CAMERA_CH_VIDEO:
+ num = 1;
+ if(my_obj->ch[ch_type].video.has_main) {
+ num += 1;
+ }
+ break;
+ case MM_CAMERA_CH_SNAPSHOT:
+ num = 2;
+ break;
+ default:
+ break;
+ }
+ return num;
+}
+
+void mm_camera_ch_util_get_stream_objs(mm_camera_obj_t * my_obj,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_stream_t **stream1,
+ mm_camera_stream_t **stream2)
+{
+ *stream1 = NULL;
+ *stream2 = NULL;
+
+ switch(ch_type) {
+ case MM_CAMERA_CH_RAW:
+ *stream1 = &my_obj->ch[ch_type].raw.stream;
+ break;
+ case MM_CAMERA_CH_PREVIEW:
+ *stream1 = &my_obj->ch[ch_type].preview.stream;
+ break;
+ case MM_CAMERA_CH_VIDEO:
+ *stream1 = &my_obj->ch[ch_type].video.video;
+ if(my_obj->ch[ch_type].video.has_main) {
+ *stream2 = &my_obj->ch[ch_type].video.main;
+ }
+ break;
+ case MM_CAMERA_CH_SNAPSHOT:
+ *stream1 = &my_obj->ch[ch_type].snapshot.main;
+ if (!my_obj->full_liveshot)
+ *stream2 = &my_obj->ch[ch_type].snapshot.thumbnail;
+ break;
+ default:
+ break;
+ }
+}
+
/* Apply image-format parameters to the stream(s) of a channel by
 * driving each stream's FSM with MM_CAMERA_STATE_EVT_SET_FMT.
 * Returns MM_CAMERA_OK on success, -1 for an unknown channel type, or
 * the first failing FSM return code (stream2 is skipped once stream1
 * fails). */
static int32_t mm_camera_ch_util_set_fmt(mm_camera_obj_t * my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_ch_image_fmt_parm_t *fmt)
{
 int32_t rc = MM_CAMERA_OK;
 mm_camera_stream_t *stream1 = NULL;
 mm_camera_stream_t *stream2 = NULL;
 mm_camera_image_fmt_t *fmt1 = NULL;
 mm_camera_image_fmt_t *fmt2 = NULL;

 /* Map the channel type to its stream/format pairs; mirrors
  * mm_camera_ch_util_get_stream_objs(). */
 switch(ch_type) {
 case MM_CAMERA_CH_RAW:
 stream1 = &my_obj->ch[ch_type].raw.stream;
 fmt1 = &fmt->def;
 break;
 case MM_CAMERA_CH_PREVIEW:
 stream1 = &my_obj->ch[ch_type].preview.stream;
 fmt1 = &fmt->def;
 break;
 case MM_CAMERA_CH_VIDEO:
 stream1 = &my_obj->ch[ch_type].video.video;
 fmt1 = &fmt->video.video;
 if(my_obj->ch[ch_type].video.has_main) {
 CDBG("%s:video channel has main image stream\n", __func__);
 stream2 = &my_obj->ch[ch_type].video.main;
 fmt2 = &fmt->video.main;
 }
 break;
 case MM_CAMERA_CH_SNAPSHOT:
 stream1 = &my_obj->ch[ch_type].snapshot.main;
 fmt1 = &fmt->snapshot.main;
 if (!my_obj->full_liveshot) {
 /* thumbnail exists only when not doing full live-shot */
 stream2 = &my_obj->ch[ch_type].snapshot.thumbnail;
 fmt2 = &fmt->snapshot.thumbnail;
 }
 break;
 default:
 rc = -1;
 break;
 }
 /* NOTE(review): casting pointers to uint32_t truncates on 64-bit
  * builds; harmless for a debug trace but %p would be correct. */
 CDBG("%s:ch=%d, streams[0x%x,0x%x]\n", __func__, ch_type,
 (uint32_t)stream1, (uint32_t)stream2);
 if(stream1)
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream1,
 MM_CAMERA_STATE_EVT_SET_FMT, fmt1);
 if(stream2 && !rc)
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream2,
 MM_CAMERA_STATE_EVT_SET_FMT, fmt2);
 return rc;
}
+
/* Acquire a channel: initialize its mutex and drive its stream(s)
 * through MM_CAMERA_STATE_EVT_ACQUIRE with the proper stream type.
 * Idempotent -- returns MM_CAMERA_OK immediately if already acquired.
 * Returns -1 for an unknown channel type. */
static int32_t mm_camera_ch_util_acquire(mm_camera_obj_t * my_obj,
 mm_camera_channel_type_t ch_type)
{
 int32_t rc = MM_CAMERA_OK;
 mm_camera_stream_t *stream1 = NULL;
 mm_camera_stream_t *stream2 = NULL;
 mm_camera_stream_type_t type1;
 /* type2 is only read when stream2 is set, so it is never used
  * uninitialized even though it has no default here. */
 mm_camera_stream_type_t type2;

 if(my_obj->ch[ch_type].acquired) {
 rc = MM_CAMERA_OK;
 goto end;
 }
 /* Mutex lives for the acquired lifetime; destroyed in release. */
 pthread_mutex_init(&my_obj->ch[ch_type].mutex, NULL);
 switch(ch_type) {
 case MM_CAMERA_CH_RAW:
 stream1 = &my_obj->ch[ch_type].raw.stream;
 type1 = MM_CAMERA_STREAM_RAW;
 break;
 case MM_CAMERA_CH_PREVIEW:
 stream1 = &my_obj->ch[ch_type].preview.stream;
 type1 = MM_CAMERA_STREAM_PREVIEW;
 break;
 case MM_CAMERA_CH_VIDEO:
 stream1 = &my_obj->ch[ch_type].video.video;
 type1 = MM_CAMERA_STREAM_VIDEO;
 /* no full image live shot by default */
 my_obj->ch[ch_type].video.has_main = FALSE;
 break;
 case MM_CAMERA_CH_SNAPSHOT:
 stream1 = &my_obj->ch[ch_type].snapshot.main;
 type1 = MM_CAMERA_STREAM_SNAPSHOT;
 if (!my_obj->full_liveshot) {
 stream2 = &my_obj->ch[ch_type].snapshot.thumbnail;
 type2 = MM_CAMERA_STREAM_THUMBNAIL;
 }
 break;
 default:
 /* NOTE(review): returns with the mutex initialized but the
  * channel not acquired; release skips unacquired channels so
  * that mutex is never destroyed. */
 return -1;
 break;
 }
 if(stream1) rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream1,
 MM_CAMERA_STATE_EVT_ACQUIRE, &type1);
 if(stream2 && !rc) rc = mm_camera_stream_fsm_fn_vtbl(my_obj, stream2,
 MM_CAMERA_STATE_EVT_ACQUIRE, &type2);
 if(rc == MM_CAMERA_OK) {
 if(!my_obj->ch[ch_type].acquired) my_obj->ch[ch_type].acquired = TRUE;
 }

end:
 return rc;
}
+
/* Release a channel: forward 'evt' (release) to each of its streams,
 * destroy the channel mutex, and zero all channel state (which also
 * clears the 'acquired' flag). Stream FSM return codes are deliberately
 * ignored -- teardown is best-effort. No-op if never acquired. */
static int32_t mm_camera_ch_util_release(mm_camera_obj_t * my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_state_evt_type_t evt)
{
 mm_camera_stream_t *stream1, *stream2;

 if(!my_obj->ch[ch_type].acquired) return MM_CAMERA_OK;

 mm_camera_ch_util_get_stream_objs(my_obj,ch_type, &stream1, &stream2);
 if(stream1)
 mm_camera_stream_fsm_fn_vtbl(my_obj, stream1, evt, NULL);
 if(stream2)
 mm_camera_stream_fsm_fn_vtbl(my_obj, stream2, evt, NULL);
 pthread_mutex_destroy(&my_obj->ch[ch_type].mutex);
 memset(&my_obj->ch[ch_type],0,sizeof(my_obj->ch[ch_type]));
 return 0;
}
+
/* Forward an event that carries no payload (stream on/off, unreg-buf)
 * to every stream of the channel. The second stream is only driven
 * when the first succeeds. Returns the first failing FSM code, or -1
 * for an unknown channel type. */
static int32_t mm_camera_ch_util_stream_null_val(mm_camera_obj_t * my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_state_evt_type_t evt, void *val)
{
 int32_t rc = 0;
 switch(ch_type) {
 case MM_CAMERA_CH_RAW:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj, &my_obj->ch[ch_type].raw.stream,
 evt, NULL);
 break;
 case MM_CAMERA_CH_PREVIEW:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj, &my_obj->ch[ch_type].preview.stream,
 evt, NULL);
 break;
 case MM_CAMERA_CH_VIDEO:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].video.video, evt,
 NULL);
 /* live-shot main stream is driven only if it was ever opened
  * (fd nonzero) */
 if(!rc && my_obj->ch[ch_type].video.main.fd)
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].video.main, evt,
 NULL);
 break;
 case MM_CAMERA_CH_SNAPSHOT:
 /* restart ZSL main/thumbnail pairing from scratch */
 my_obj->ch[ch_type].snapshot.expected_matching_id = 0;
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].snapshot.main, evt,
 NULL);
 if(!rc && !my_obj->full_liveshot)
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].snapshot.thumbnail, evt,
 NULL);
 break;
 default:
 CDBG_ERROR("%s: Invalid ch_type=%d", __func__, ch_type);
 rc = -1;
 break;
 }
 return rc;
}
+
/* Forward a buffer registration/request/enqueue event to the channel's
 * stream(s), unpacking the channel-specific payload in 'val':
 * RAW/PREVIEW take a single mm_camera_buf_def_t; VIDEO and SNAPSHOT
 * carry per-stream buffers. The second stream is only driven when the
 * first succeeds. Returns the first failing FSM code, or -1 for an
 * unknown channel type. */
static int32_t mm_camera_ch_util_reg_buf(mm_camera_obj_t * my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_state_evt_type_t evt, void *val)
{
 int32_t rc = 0;
 switch(ch_type) {
 case MM_CAMERA_CH_RAW:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].raw.stream, evt,
 (mm_camera_buf_def_t *)val);
 break;
 case MM_CAMERA_CH_PREVIEW:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].preview.stream, evt,
 (mm_camera_buf_def_t *)val);
 break;
 case MM_CAMERA_CH_VIDEO:
 {
 mm_camera_buf_video_t * buf = (mm_camera_buf_video_t *)val;
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].video.video, evt,
 &buf->video);
 if(!rc && my_obj->ch[ch_type].video.has_main) {
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].video.main, evt,
 &buf->main);
 }
 }
 break;
 case MM_CAMERA_CH_SNAPSHOT:
 {
 mm_camera_buf_snapshot_t * buf = (mm_camera_buf_snapshot_t *)val;
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].snapshot.main, evt,
 &buf->main);
 if(!rc && !my_obj->full_liveshot) {
 /* thumbnail buffers exist only when not in full live-shot */
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].snapshot.thumbnail, evt,
 & buf->thumbnail);
 }
 }
 break;
 default:
 return -1;
 break;
 }
 return rc;
}
+
+static int32_t mm_camera_ch_util_attr(mm_camera_obj_t *my_obj,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_channel_attr_t *val)
+{
+ int rc = -MM_CAMERA_E_NOT_SUPPORTED;
+ /*if(ch_type != MM_CAMERA_CH_RAW) {
+ CDBG("%s: attr type %d not support for ch %d\n", __func__, val->type, ch_type);
+ return rc;
+ }*/
+ if(my_obj->ch[ch_type].acquired== 0) {
+ CDBG_ERROR("%s Channel %d not yet acquired ", __func__, ch_type);
+ return -MM_CAMERA_E_INVALID_OPERATION;
+ }
+ switch(val->type) {
+ case MM_CAMERA_CH_ATTR_RAW_STREAMING_TYPE:
+ if(val->raw_streaming_mode == MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE) {
+ my_obj->ch[ch_type].raw.mode = val->raw_streaming_mode;
+ rc = MM_CAMERA_OK;
+ }
+ break;
+ case MM_CAMERA_CH_ATTR_BUFFERING_FRAME:
+ /* it's good to check the stream state. TBD later */
+ memcpy(&my_obj->ch[ch_type].buffering_frame, &val->buffering_frame, sizeof(val->buffering_frame));
+ break;
+ default:
+ break;
+ }
+ return MM_CAMERA_OK;
+}
+
/* Register a frame-data callback in the first free slot of the
 * channel's callback table. Always reports MM_CAMERA_OK.
 * TODO(known): when all MM_CAMERA_BUF_CB_MAX slots are taken the
 * callback is silently dropped; returning a failure here requires a
 * thread-safe rc, which is why it has not been done yet. */
static int32_t mm_camera_ch_util_reg_buf_cb(mm_camera_obj_t *my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_buf_cb_t *val)
{
 int i;
 ALOGV("%s: Trying to register",__func__);
 /* NOTE(review): the channel mutex lock around the table scan is
  * commented out -- concurrent registration could race on a slot. */
// pthread_mutex_lock(&my_obj->ch[ch_type].mutex);
 for( i=0 ;i < MM_CAMERA_BUF_CB_MAX; i++ ) {
 if(my_obj->ch[ch_type].buf_cb[i].cb==NULL) {
 memcpy(&my_obj->ch[ch_type].buf_cb[i],val,sizeof(mm_camera_buf_cb_t));
 break;
 }
 }
// pthread_mutex_unlock(&my_obj->ch[ch_type].mutex);
 ALOGV("%s: Done register",__func__);
 return MM_CAMERA_OK;
}
+
/* Re-queue buffer(s) to the driver for the given channel, then (when
 * built with ION) invalidate the CPU cache for the queued frame(s) so
 * stale lines are not read after the hardware writes the buffer.
 * In ZSL op-mode the snapshot "thumbnail" buffer is actually queued to
 * the preview stream. Returns the stream FSM result, or -1 for an
 * unknown channel type. */
static int32_t mm_camera_ch_util_qbuf(mm_camera_obj_t *my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_state_evt_type_t evt,
 mm_camera_ch_data_buf_t *val)
{
 int32_t rc = -1;
 mm_camera_stream_t *stream;
 struct ion_flush_data cache_inv_data;
 struct ion_custom_data custom_data;
 int ion_fd;
 /* cache_frame: primary frame to invalidate; cache_frame1: optional
  * secondary frame (live-shot main / thumbnail). */
 struct msm_frame *cache_frame;
 struct msm_frame *cache_frame1 = NULL;

 CDBG("<DEBUG>: %s:ch_type:%d",__func__,ch_type);
 switch(ch_type) {
 case MM_CAMERA_CH_RAW:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].raw.stream, evt,
 &val->def);
 cache_frame = val->def.frame;
 break;
 case MM_CAMERA_CH_PREVIEW:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].preview.stream, evt,
 &val->def);
 cache_frame = val->def.frame;
 CDBG("buffer fd = %d, length = %d, vaddr = %p\n",
 val->def.frame->fd, val->def.frame->ion_alloc.len, val->def.frame->buffer);
 break;
 case MM_CAMERA_CH_VIDEO:
 {
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].video.video, evt,
 &val->video.video);
 cache_frame = val->video.video.frame;
 CDBG("buffer fd = %d, length = %d, vaddr = %p\n",
 val->video.video.frame->fd, val->video.video.frame->ion_alloc.len, val->video.video.frame->buffer);

 /* optional live-shot main buffer rides along with the video one */
 if(!rc && val->video.main.frame) {
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].video.main, evt,
 &val->video.main);
 cache_frame1 = val->video.main.frame;
 }
 }
 break;
 case MM_CAMERA_CH_SNAPSHOT:
 {
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].snapshot.main, evt,
 &val->snapshot.main);
 cache_frame = val->snapshot.main.frame;
 CDBG("buffer fd = %d, length = %d, vaddr = %p\n",
 val->snapshot.main.frame->fd, val->snapshot.main.frame->ion_alloc.len, val->snapshot.main.frame->buffer);
 if(!rc) {
 /* in ZSL the "thumbnail" buffer belongs to the preview stream */
 if (my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL)
 stream = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream;
 else
 stream = &my_obj->ch[ch_type].snapshot.thumbnail;
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 stream, evt,
 &val->snapshot.thumbnail);
 cache_frame1 = val->snapshot.thumbnail.frame;
 CDBG("buffer fd = %d, length = %d, vaddr = %p\n",
 val->snapshot.thumbnail.frame->fd, val->snapshot.thumbnail.frame->ion_alloc.len, val->snapshot.thumbnail.frame->buffer);
 }
 }
 break;
 default:
 return -1;
 break;
 }
#ifdef USE_ION
 /* Invalidate the primary frame; note cache_frame is assigned on every
  * reachable path above (default returns early). */
 cache_inv_data.vaddr = cache_frame->buffer;
 cache_inv_data.fd = cache_frame->fd;
 cache_inv_data.handle = cache_frame->fd_data.handle;
 cache_inv_data.length = cache_frame->ion_alloc.len;
 custom_data.cmd = ION_IOC_INV_CACHES;
 custom_data.arg = &cache_inv_data;
 ion_fd = cache_frame->ion_dev_fd;
 if(ion_fd > 0) {
 if(ioctl(ion_fd, ION_IOC_CUSTOM, &custom_data) < 0)
 CDBG_ERROR("%s: Cache Invalidate failed\n", __func__);
 else {
 CDBG("%s: Successful cache invalidate\n", __func__);
 /* NOTE(review): the secondary frame is only invalidated when the
  * first invalidate succeeded -- looks unintentional; verify. */
 if(cache_frame1) {
 ion_fd = cache_frame1->ion_dev_fd;
 cache_inv_data.vaddr = cache_frame1->buffer;
 cache_inv_data.fd = cache_frame1->fd;
 cache_inv_data.handle = cache_frame1->fd_data.handle;
 cache_inv_data.length = cache_frame1->ion_alloc.len;
 custom_data.cmd = ION_IOC_INV_CACHES;
 custom_data.arg = &cache_inv_data;
 if(ioctl(ion_fd, ION_IOC_CUSTOM, &custom_data) < 0)
 CDBG_ERROR("%s: Cache Invalidate failed\n", __func__);
 else
 CDBG("%s: Successful cache invalidate\n", __func__);
 }
 }
 }
#endif

 return rc;
}
+
/* Query crop information for a channel by forwarding the GET_CROP event
 * to its stream(s) and filling the channel-specific fields of 'crop'.
 * For SNAPSHOT the thumbnail crop is fetched only when not in full
 * live-shot mode. Returns the FSM result, or -1 for an unknown type. */
static int mm_camera_ch_util_get_crop(mm_camera_obj_t *my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_state_evt_type_t evt,
 mm_camera_ch_crop_t *crop)
{
 int rc = MM_CAMERA_OK;
 switch(ch_type) {
 case MM_CAMERA_CH_RAW:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].raw.stream, evt,
 &crop->crop);
 break;
 case MM_CAMERA_CH_PREVIEW:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].preview.stream, evt,
 &crop->crop);
 break;
 case MM_CAMERA_CH_VIDEO:
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].video.video, evt,
 &crop->crop);
 break;
 case MM_CAMERA_CH_SNAPSHOT:
 {
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].snapshot.main, evt,
 &crop->snapshot.main_crop);
 if(!rc && !my_obj->full_liveshot) {
 ALOGV("%s: should not come here for Live Shot", __func__);
 rc = mm_camera_stream_fsm_fn_vtbl(my_obj,
 &my_obj->ch[ch_type].snapshot.thumbnail, evt,
 &crop->snapshot.thumbnail_crop);
 }
 }
 break;
 default:
 return -1;
 break;
 }
 return rc;
}
+
/* Thin wrapper: hand dispatch of buffered (ZSL) frames for this channel
 * over to the poll-thread machinery. */
static int mm_camera_ch_util_dispatch_buffered_frame(mm_camera_obj_t *my_obj,
 mm_camera_channel_type_t ch_type)
{
 return mm_camera_poll_dispatch_buffered_frames(my_obj, ch_type);
}
+
/* Age of 'frame_ts' relative to 'cur_ts', in microseconds, minus the
 * desired offset 'usec_target'. A positive result means the frame is
 * older than the target; negative means newer. Nanoseconds are
 * truncated to whole microseconds. */
int mm_camera_channel_get_time_diff(struct timespec *cur_ts, int usec_target, struct timespec *frame_ts)
{
    int sec_part_us  = (cur_ts->tv_sec - frame_ts->tv_sec) * 1000000;
    int nsec_part_us = (cur_ts->tv_nsec - frame_ts->tv_nsec) / 1000;

    return sec_part_us + nsec_part_us - usec_target;
}
+
+static int mm_camera_channel_skip_frames(mm_camera_obj_t *my_obj,
+ mm_camera_frame_queue_t *mq,
+ mm_camera_frame_queue_t *sq,
+ mm_camera_stream_t *mstream,
+ mm_camera_stream_t *sstream,
+ mm_camera_channel_attr_buffering_frame_t *frame_attr)
+{
+ int count = 0;
+ int i = 0;
+ mm_camera_frame_t *mframe = NULL, *sframe = NULL;
+ mm_camera_notify_frame_t notify_frame;
+
+ count = mm_camera_stream_frame_get_q_cnt(mq);
+ if(count < mm_camera_stream_frame_get_q_cnt(sq))
+ count = mm_camera_stream_frame_get_q_cnt(sq);
+ CDBG("%s: Q-size=%d, look_back =%d, M_match=%d, T_match=%d", __func__,
+ count, frame_attr->look_back, mq->match_cnt, sq->match_cnt);
+
+ count -= frame_attr->look_back;
+ CDBG("count=%d, frame_attr->look_back=%d,mq->match_cnt=%d, sq->match_cnt=%d",
+ count, frame_attr->look_back, mq->match_cnt,sq->match_cnt);
+ for(i=0; i < count; i++) {
+ mframe = mm_camera_stream_frame_deq(mq);
+ sframe = mm_camera_stream_frame_deq(sq);
+ if(mframe && sframe && mframe->frame.frame_id ==
+ sframe->frame.frame_id) {
+ mq->match_cnt--;
+ sq->match_cnt--;
+ }
+ if(mframe) {
+ notify_frame.frame = &mframe->frame;
+ notify_frame.idx = mframe->idx;
+ mm_camera_stream_util_buf_done(my_obj, mstream, ¬ify_frame);
+ }
+ if(sframe) {
+ notify_frame.frame = &sframe->frame;
+ notify_frame.idx = sframe->idx;
+ mm_camera_stream_util_buf_done(my_obj, sstream, ¬ify_frame);
+ }
+ }
+
+ CDBG("Post %s: Q-size=%d, look_back =%d, M_match=%d, T_match=%d", __func__,
+ count, frame_attr->look_back, mq->match_cnt, sq->match_cnt);
+ return MM_CAMERA_OK;
+}
+
/* ZSL delivery: pair buffered main-image frames with matching preview
 * frames and deliver 'snap_burst_num_by_user' pairs to every registered
 * channel callback. Older frames beyond the configured look-back are
 * recycled first via mm_camera_channel_skip_frames(). When all owed
 * snapshots have been delivered, a DATA_DELIVERY_DONE channel event is
 * posted. Takes the preview mutex before the snapshot mutex; unlocks in
 * reverse order. */
void mm_camera_dispatch_buffered_frames(mm_camera_obj_t *my_obj,
 mm_camera_channel_type_t ch_type)
{
 int mcnt, i, rc = MM_CAMERA_E_GENERAL, scnt;
 int num_of_req_frame = 0;
 int j;
 mm_camera_ch_data_buf_t data;
 mm_camera_frame_t *mframe = NULL, *sframe = NULL;
 /* qmframe/qsframe: an unmatched leftover to re-queue before exit */
 mm_camera_frame_t *qmframe = NULL, *qsframe = NULL;
 mm_camera_ch_t *ch = &my_obj->ch[ch_type];
 mm_camera_frame_queue_t *mq = NULL;
 mm_camera_frame_queue_t *sq = NULL;
 mm_camera_stream_t *stream1 = NULL;
 mm_camera_stream_t *stream2 = NULL;
 ALOGV("%s: E", __func__);
 mm_camera_ch_util_get_stream_objs(my_obj, ch_type, &stream1, &stream2);
 /* in ZSL the "secondary" frames come from the preview stream, so the
  * stream2 returned above is overridden unconditionally here */
 stream2 = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream;
 if(stream1) {
 mq = &stream1->frame.readyq;
 }
 if(stream2) {
 sq = &stream2->frame.readyq;
 }
 CDBG("mq=%p, sq=%p, stream1=%p, stream2=%p", mq, sq, stream1, stream2);
 pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
 pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
 if (mq && sq && stream1 && stream2) {
 rc = mm_camera_channel_skip_frames(my_obj, mq, sq, stream1, stream2, &ch->buffering_frame);
 if(rc != MM_CAMERA_OK) {
 CDBG_ERROR("%s: Error getting right frame!", __func__);
 goto end;
 }
 num_of_req_frame = my_obj->snap_burst_num_by_user;
 ch->snapshot.pending_cnt = num_of_req_frame;

 CDBG("num_of_req_frame =%d", num_of_req_frame);
 for(i = 0; i < num_of_req_frame; i++) {
 mframe = mm_camera_stream_frame_deq(mq);
 sframe = mm_camera_stream_frame_deq(sq);
 if(mframe && sframe) {
 CDBG("%s: frame_id = 0x%x|0x%x, main idx = %d, thumbnail idx = %d", __func__,
 mframe->frame.frame_id, sframe->frame.frame_id, mframe->idx, sframe->idx);
 /* ids should already match after skip_frames; mismatch is
  * logged but the pair is still delivered */
 if(mframe->frame.frame_id != sframe->frame.frame_id) {
 CDBG_ERROR("%s: ZSL algorithm error, main and thumbnail "
 "frame_ids not same. Need bug fix", __func__);
 }
 memset(&data, 0, sizeof(data));
 data.type = ch_type;
 data.snapshot.main.frame = &mframe->frame;
 data.snapshot.main.idx = mframe->idx;
 data.snapshot.thumbnail.frame = &sframe->frame;
 data.snapshot.thumbnail.idx = sframe->idx;
 ch->snapshot.pending_cnt--;
 mq->match_cnt--;
 sq->match_cnt--;
 /* fan the pair out to every registered callback */
 for(j=0;j<MM_CAMERA_BUF_CB_MAX;j++) {
 if( ch->buf_cb[j].cb!=NULL )
 ch->buf_cb[j].cb(&data, ch->buf_cb[j].user_data);
 }
 } else {
 /* ran out of matched pairs: remember the stragglers so they
  * can be put back on their queues below */
 CDBG_ERROR("%s: mframe %p, sframe = %p", __func__, mframe, sframe);
 qmframe = mframe;
 qsframe = sframe;
 rc = -1;
 break;
 }
 }
 if(qmframe) {
 mm_camera_stream_frame_enq(mq, &stream1->frame.frame[qmframe->idx]);
 qmframe = NULL;
 }
 if(qsframe) {
 mm_camera_stream_frame_enq(sq, &stream2->frame.frame[qsframe->idx]);
 qsframe = NULL;
 }
 } else {
 CDBG_ERROR(" mq =%p sq =%p stream1 =%p stream2 =%p", mq, sq , stream1 , stream2);

 }
 CDBG("%s: burst number: %d, pending_count: %d", __func__,
 my_obj->snap_burst_num_by_user, ch->snapshot.pending_cnt);
end:
 pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
 pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
 /* If we are done sending callbacks for all the requested number of snapshots
 send data delivery done event*/
 if((rc == MM_CAMERA_OK) && (!ch->snapshot.pending_cnt)) {
 /* note: this 'data' is a new mm_camera_event_t local, shadowing
  * nothing but reusing the name of the buf struct above */
 mm_camera_event_t data;
 data.event_type = MM_CAMERA_EVT_TYPE_CH;
 data.e.ch.evt = MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE;
 data.e.ch.ch = ch_type;
 mm_camera_poll_send_ch_event(my_obj, &data);
 }
}
+
/* Channel FSM entry point: route a state event to the matching helper.
 * For STREAM_ON the channel is first added to its poll thread (and the
 * poll thread released again if stream-on fails); for STREAM_OFF it is
 * removed before the streams are stopped. Returns the helper's result,
 * or MM_CAMERA_OK for unhandled events. */
int32_t mm_camera_ch_fn(mm_camera_obj_t * my_obj,
 mm_camera_channel_type_t ch_type,
 mm_camera_state_evt_type_t evt, void *val)
{
 int32_t rc = MM_CAMERA_OK;

 CDBG("%s:ch = %d, evt=%d\n", __func__, ch_type, evt);
 switch(evt) {
 case MM_CAMERA_STATE_EVT_ACQUIRE:
 rc = mm_camera_ch_util_acquire(my_obj, ch_type);
 break;
 case MM_CAMERA_STATE_EVT_RELEASE:
 /* safe code in case no stream off before release. */
 //mm_camera_poll_thread_release(my_obj, ch_type);
 rc = mm_camera_ch_util_release(my_obj, ch_type, evt);
 break;
 case MM_CAMERA_STATE_EVT_ATTR:
 rc = mm_camera_ch_util_attr(my_obj, ch_type,
 (mm_camera_channel_attr_t *)val);
 break;
 case MM_CAMERA_STATE_EVT_REG_BUF_CB:
 rc = mm_camera_ch_util_reg_buf_cb(my_obj, ch_type,
 (mm_camera_buf_cb_t *)val);
 break;
 case MM_CAMERA_STATE_EVT_SET_FMT:
 rc = mm_camera_ch_util_set_fmt(my_obj, ch_type,
 (mm_camera_ch_image_fmt_parm_t *)val);
 break;
 case MM_CAMERA_STATE_EVT_REG_BUF:
 case MM_CAMERA_STATE_EVT_REQUEST_BUF:
 case MM_CAMERA_STATE_EVT_ENQUEUE_BUF:
 rc = mm_camera_ch_util_reg_buf(my_obj, ch_type, evt, val);
 break;
 case MM_CAMERA_STATE_EVT_UNREG_BUF:
 rc = mm_camera_ch_util_stream_null_val(my_obj, ch_type, evt, NULL);
 break;
 case MM_CAMERA_STATE_EVT_STREAM_ON: {
 /* single-shot raw capture needs the driver put in raw op mode
  * before streaming starts */
 if(ch_type == MM_CAMERA_CH_RAW &&
 my_obj->ch[ch_type].raw.mode == MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE) {
 if( MM_CAMERA_OK != (rc = mm_camera_util_s_ctrl(my_obj->ctrl_fd,
 MSM_V4L2_PID_CAM_MODE, MSM_V4L2_CAM_OP_RAW))) {
 CDBG("%s:set MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE err=%d\n", __func__, rc);
 break;
 }
 }
 /* register with the poll thread before turning streams on so no
  * early frame is missed */
 mm_camera_poll_thread_add_ch(my_obj, ch_type);
 rc = mm_camera_ch_util_stream_null_val(my_obj, ch_type, evt, NULL);
 if(rc < 0) {
 CDBG_ERROR("%s: Failed in STREAM ON", __func__);
 mm_camera_poll_thread_release(my_obj, ch_type);
 }
 break;
 }
 case MM_CAMERA_STATE_EVT_STREAM_OFF: {
 /* stop polling first so no callback fires on a dying stream */
 mm_camera_poll_thread_del_ch(my_obj, ch_type);
 rc = mm_camera_ch_util_stream_null_val(my_obj, ch_type, evt, NULL);
 break;
 }
 case MM_CAMERA_STATE_EVT_QBUF:
 rc = mm_camera_ch_util_qbuf(my_obj, ch_type, evt,
 (mm_camera_ch_data_buf_t *)val);
 break;
 case MM_CAMERA_STATE_EVT_GET_CROP:
 rc = mm_camera_ch_util_get_crop(my_obj, ch_type, evt,
 (mm_camera_ch_crop_t *)val);
 break;
 case MM_CAMERA_STATE_EVT_DISPATCH_BUFFERED_FRAME:
 rc = mm_camera_ch_util_dispatch_buffered_frame(my_obj, ch_type);
 break;
 default:
 break;
 }
 return rc;
}
diff --git a/camera/mm-camera-interface/mm_camera_dbg.h b/camera/mm-camera-interface/mm_camera_dbg.h
new file mode 100644
index 0000000..10205db
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_dbg.h
@@ -0,0 +1,70 @@
+/*
+Copyright (c) 2011, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_DBG_H__
+#define __MM_CAMERA_DBG_H__
+
+//#define LOG_DEBUG 1
+
+#ifndef LOG_DEBUG
+ #ifdef _ANDROID_
+ #undef LOG_NIDEBUG
+ #undef LOG_TAG
+ #define LOG_NIDEBUG 0
+ #define LOG_TAG "mm-libcamera2"
+ #include <utils/Log.h>
+ #else
+ #include <stdio.h>
+ #define ALOGV CDBG
+ #endif
+ #undef CDBG
+ #define CDBG(fmt, args...) do{}while(0)
+#else
+ #ifdef _ANDROID_
+ #undef LOG_NIDEBUG
+ #undef LOG_TAG
+ #define LOG_NIDEBUG 0
+ #define LOG_TAG "mm-libcamera2"
+ #include <utils/Log.h>
+ #define CDBG(fmt, args...) ALOGV(fmt, ##args)
+ #else
+ #include <stdio.h>
+ #define CDBG(fmt, args...) fprintf(stderr, fmt, ##args)
+ #define ALOGE(fmt, args...) fprintf(stderr, fmt, ##args)
+ #endif
+#endif
+
+#ifdef _ANDROID_
+ #define CDBG_HIGH(fmt, args...) ALOGE(fmt, ##args)
+ #define CDBG_ERROR(fmt, args...) ALOGE(fmt, ##args)
+#else
+ #define CDBG_HIGH(fmt, args...) fprintf(stderr, fmt, ##args)
+ #define CDBG_ERROR(fmt, args...) fprintf(stderr, fmt, ##args)
+#endif
+#endif /* __MM_CAMERA_DBG_H__ */
diff --git a/camera/mm-camera-interface/mm_camera_helper.c b/camera/mm-camera-interface/mm_camera_helper.c
new file mode 100644
index 0000000..cb90e0c
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_helper.c
@@ -0,0 +1,335 @@
+/*
+Copyright (c) 2011, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <sys/mman.h>
+#include <fcntl.h>
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <ctype.h>
+#include <errno.h>
+#include <string.h>
+#include "mm_camera_dbg.h"
+#include <time.h>
+#include "mm_camera_interface2.h"
+#include <linux/msm_ion.h>
+
+#define MM_CAMERA_PROFILE 1
+
+struct file;
+struct inode;
+struct vm_area_struct;
+
+/*===========================================================================
+ * FUNCTION - do_mmap -
+ *
+ * DESCRIPTION: retured virtual addresss
+ *==========================================================================*/
+uint8_t *mm_camera_do_mmap(uint32_t size, int *pmemFd)
+{
+ void *ret; /* returned virtual address */
+ int pmem_fd = open("/dev/pmem_adsp", O_RDWR|O_SYNC);
+
+ if (pmem_fd <= 0) {
+ CDBG("do_mmap: Open device /dev/pmem_adsp failed!\n");
+ return NULL;
+ }
+ /* to make it page size aligned */
+ size = (size + 4095) & (~4095);
+ ret = mmap(NULL,
+ size,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ pmem_fd,
+ 0);
+ if (ret == MAP_FAILED) {
+ CDBG("do_mmap: pmem mmap() failed: %s (%d)\n", strerror(errno), errno);
+ close(pmem_fd);
+ return NULL;
+ }
+ CDBG("do_mmap: pmem mmap fd %d ptr %p len %u\n", pmem_fd, ret, size);
+ *pmemFd = pmem_fd;
+ return(uint8_t *)ret;
+}
+
+/*===========================================================================
+ * FUNCTION - do_munmap -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+int mm_camera_do_munmap(int pmem_fd, void *addr, size_t size)
+{
+ int rc;
+
+ if (pmem_fd <= 0) {
+ CDBG("%s:invalid fd=%d\n", __func__, pmem_fd);
+ return -1;
+ }
+ size = (size + 4095) & (~4095);
+ CDBG("munmapped size = %d, virt_addr = 0x%p\n",
+ size, addr);
+ rc = (munmap(addr, size));
+ close(pmem_fd);
+ CDBG("do_mmap: pmem munmap fd %d ptr %p len %u rc %d\n", pmem_fd, addr,
+ size, rc);
+ return rc;
+}
+
+#ifdef USE_ION
+uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+ struct ion_fd_data *ion_info_fd, int *mapFd)
+{
+ void *ret; /* returned virtual address */
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ /* to make it page size aligned */
+ alloc->len = (alloc->len + 4095) & (~4095);
+
+ rc = ioctl(ion_fd, ION_IOC_ALLOC, alloc);
+ if (rc < 0) {
+ CDBG_ERROR("ION allocation failed\n");
+ goto ION_ALLOC_FAILED;
+ }
+
+ ion_info_fd->handle = alloc->handle;
+ rc = ioctl(ion_fd, ION_IOC_SHARE, ion_info_fd);
+ if (rc < 0) {
+ CDBG_ERROR("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ *mapFd = ion_info_fd->fd;
+ ret = mmap(NULL,
+ alloc->len,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ *mapFd,
+ 0);
+
+ if (ret == MAP_FAILED) {
+ CDBG_ERROR("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+ goto ION_MAP_FAILED;
+ }
+
+ return ret;
+
+ION_MAP_FAILED:
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ return NULL;
+}
+
+int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+ void *addr, size_t size)
+{
+ int rc = 0;
+ rc = munmap(addr, size);
+ close(ion_info_fd->fd);
+
+ struct ion_handle_data handle_data;
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ return rc;
+}
+#endif
+
+/*============================================================
+ FUNCTION mm_camera_dump_image
+ DESCRIPTION:
+==============================================================*/
+int mm_camera_dump_image(void *addr, uint32_t size, char *filename)
+{
+ int file_fd = open(filename, O_RDWR | O_CREAT, 0777);
+
+ if (file_fd < 0) {
+ CDBG_HIGH("%s: cannot open file\n", __func__);
+ return -1;
+ } else
+ write(file_fd, addr, size);
+ close(file_fd);
+ CDBG("%s: %s, size=%d\n", __func__, filename, size);
+ return 0;
+}
+
+uint32_t mm_camera_get_msm_frame_len(cam_format_t fmt_type,
+ camera_mode_t mode,
+ int width,
+ int height,
+ int image_type,
+ uint8_t *num_planes,
+ uint32_t plane[])
+{
+ uint32_t size;
+ *num_planes = 0;
+ int local_height;
+
+ switch (fmt_type) {
+ case CAMERA_YUV_420_NV12:
+ case CAMERA_YUV_420_NV21:
+ *num_planes = 2;
+ if(CAMERA_MODE_3D == mode) {
+ size = (uint32_t)(PAD_TO_2K(width*height)*3/2);
+ plane[0] = PAD_TO_WORD(width*height);
+ } else {
+ if (image_type == OUTPUT_TYPE_V) {
+ plane[0] = PAD_TO_2K(width * height);
+ plane[1] = PAD_TO_2K(width * height/2);
+ } else if (image_type == OUTPUT_TYPE_P) {
+ plane[0] = PAD_TO_WORD(width * height);
+ plane[1] = PAD_TO_WORD(width * height/2);
+ } else {
+ plane[0] = PAD_TO_WORD(width * CEILING16(height));
+ plane[1] = PAD_TO_WORD(width * CEILING16(height)/2);
+ }
+ size = plane[0] + plane[1];
+ }
+ break;
+ case CAMERA_YUV_420_YV12:
+ if (CAMERA_MODE_3D == mode) {
+ *num_planes = 1;
+ size = (uint32_t)(PAD_TO_2K(width*height)*3/2);
+ plane[0] = PAD_TO_WORD(width*height);
+ } else {
+ *num_planes = 3;
+ plane[0] = PAD_TO_2K(CEILING16(width) * height);
+ plane[1] = PAD_TO_2K(CEILING16(width/2) * height/2);
+ plane[2] = PAD_TO_2K(CEILING16(width/2) * height/2);
+ size = plane[0] + plane[1] + plane[2];
+ }
+ break;
+ case CAMERA_BAYER_SBGGR10:
+ *num_planes = 1;
+ plane[0] = PAD_TO_WORD(width * height);
+ size = plane[0];
+ break;
+ case CAMERA_YUV_422_YUYV:
+ *num_planes = 1;
+ plane[0] = PAD_TO_WORD(width * height);
+ size = plane[0];
+ break;
+ case CAMERA_YUV_422_NV16:
+ case CAMERA_YUV_422_NV61:
+ if( image_type == OUTPUT_TYPE_S || image_type == OUTPUT_TYPE_V) {
+ local_height = CEILING16(height);
+ } else {
+ local_height = height;
+ }
+ *num_planes = 2;
+ plane[0] = PAD_TO_WORD(width * height);
+ plane[1] = PAD_TO_WORD(width * height);
+ size = plane[0] + plane[1];
+ break;
+ default:
+ CDBG("%s: format %d not supported.\n",
+ __func__, fmt_type);
+ size = 0;
+ }
+ CDBG("%s:fmt=%d,image_type=%d,width=%d,height=%d,frame_len=%d\n",
+ __func__, fmt_type, image_type, width, height, size);
+ return size;
+}
+
+void mm_camera_util_profile(const char *str)
+{
+#if (MM_CAMERA_PROFILE)
+ struct timespec cur_time;
+
+ clock_gettime(CLOCK_REALTIME, &cur_time);
+ CDBG_HIGH("PROFILE %s: %ld.%09ld\n", str,
+ cur_time.tv_sec, cur_time.tv_nsec);
+#endif
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_do_mmap_ion -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+ struct ion_fd_data *ion_info_fd, int *mapFd)
+{
+ void *ret; /* returned virtual address */
+ int rc = 0;
+ struct ion_handle_data handle_data;
+
+ /* to make it page size aligned */
+ alloc->len = (alloc->len + 4095) & (~4095);
+
+ rc = ioctl(ion_fd, ION_IOC_ALLOC, alloc);
+ if (rc < 0) {
+ CDBG_ERROR("ION allocation failed %s\n", strerror(errno));
+ goto ION_ALLOC_FAILED;
+ }
+
+ ion_info_fd->handle = alloc->handle;
+ rc = ioctl(ion_fd, ION_IOC_SHARE, ion_info_fd);
+ if (rc < 0) {
+ CDBG_ERROR("ION map failed %s\n", strerror(errno));
+ goto ION_MAP_FAILED;
+ }
+ *mapFd = ion_info_fd->fd;
+ ret = mmap(NULL,
+ alloc->len,
+ PROT_READ | PROT_WRITE,
+ MAP_SHARED,
+ *mapFd,
+ 0);
+
+ if (ret == MAP_FAILED) {
+ CDBG_ERROR("ION_MMAP_FAILED: %s (%d)\n", strerror(errno), errno);
+ goto ION_MAP_FAILED;
+ }
+
+ return ret;
+
+ION_MAP_FAILED:
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ION_ALLOC_FAILED:
+ return NULL;
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_do_munmap_ion -
+ *
+ * DESCRIPTION:
+ *==========================================================================*/
+int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+ void *addr, size_t size)
+{
+ int rc = 0;
+ rc = munmap(addr, size);
+ close(ion_info_fd->fd);
+
+ struct ion_handle_data handle_data;
+ handle_data.handle = ion_info_fd->handle;
+ ioctl(ion_fd, ION_IOC_FREE, &handle_data);
+ return rc;
+}
diff --git a/camera/mm-camera-interface/mm_camera_interface2.c b/camera/mm-camera-interface/mm_camera_interface2.c
new file mode 100644
index 0000000..433c612
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_interface2.c
@@ -0,0 +1,996 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <stddef.h>
+#include <poll.h>
+#include <linux/media.h>
+
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+#define SET_PARM_BIT32(parm, parm_arr) \
+ (parm_arr[parm/32] |= (1<<(parm%32)))
+
+#define GET_PARM_BIT32(parm, parm_arr) \
+ ((parm_arr[parm/32]>>(parm%32))& 0x1)
+
+static pthread_mutex_t g_mutex = PTHREAD_MUTEX_INITIALIZER;
+
+static mm_camera_ctrl_t g_cam_ctrl;
+
+static int mm_camera_util_opcode_2_ch_type(mm_camera_obj_t *my_obj,
+ mm_camera_ops_type_t opcode)
+{
+ switch(opcode) {
+ case MM_CAMERA_OPS_PREVIEW:
+ return MM_CAMERA_CH_PREVIEW;
+ case MM_CAMERA_OPS_ZSL:
+ case MM_CAMERA_OPS_SNAPSHOT:
+ return MM_CAMERA_CH_SNAPSHOT;
+ case MM_CAMERA_OPS_PREPARE_SNAPSHOT:
+ return MM_CAMERA_CH_SNAPSHOT;
+ case MM_CAMERA_OPS_RAW:
+ return MM_CAMERA_CH_RAW;
+ default:
+ break;
+ }
+ return -1;
+}
+
+const char *mm_camera_util_get_dev_name(mm_camera_obj_t * my_obj)
+{
+ CDBG("%s: Returning %s at index :%d\n",
+ __func__,g_cam_ctrl.camera[my_obj->my_id].video_dev_name,my_obj->my_id);
+ return g_cam_ctrl.camera[my_obj->my_id].video_dev_name;
+}
+
+/* used for querying the camera_info of the given camera_id */
+static const qcamera_info_t * mm_camera_cfg_query_camera_info (int8_t camera_id)
+{
+ if(camera_id >= MSM_MAX_CAMERA_SENSORS)
+ return NULL;
+ return &g_cam_ctrl.camera[camera_id].camera_info;
+}
+/* check if the parm is supported */
+static uint8_t mm_camera_cfg_is_parm_supported (mm_camera_t * camera,
+ mm_camera_parm_type_t parm_type)
+{
+ int is_parm_supported = 0;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ is_parm_supported = GET_PARM_BIT32(parm_type,
+ my_obj->properties.parm);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+
+ return is_parm_supported;
+}
+
+/* check if the channel is supported */
+static uint8_t mm_camera_cfg_is_ch_supported (mm_camera_t * camera,
+ mm_camera_channel_type_t ch_type)
+{
+ switch(ch_type) {
+ case MM_CAMERA_CH_PREVIEW:
+ case MM_CAMERA_CH_VIDEO:
+ case MM_CAMERA_CH_SNAPSHOT:
+ case MM_CAMERA_CH_RAW:
+ return TRUE;
+ case MM_CAMERA_CH_MAX:
+ default:
+ return FALSE;
+ }
+ return FALSE;
+}
+
+/* set a parms current value */
+static int32_t mm_camera_cfg_set_parm (mm_camera_t * camera,
+ mm_camera_parm_type_t parm_type,
+ void *p_value)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ uint32_t tmp;
+ mm_camera_obj_t * my_obj = NULL;
+ mm_camera_parm_t parm = {.parm_type = parm_type, .p_value = p_value};
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_set_parm(my_obj, &parm);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+/* get a parms current value */
+static int32_t mm_camera_cfg_get_parm (mm_camera_t * camera,
+ mm_camera_parm_type_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ uint32_t tmp;
+ mm_camera_obj_t * my_obj = NULL;
+ mm_camera_parm_t parm = {.parm_type = parm_type, .p_value = p_value};
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_get_parm(my_obj, &parm);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+static int32_t mm_camera_cfg_request_buf(mm_camera_t * camera,
+ mm_camera_reg_buf_t *buf)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ uint32_t tmp;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_request_buf(my_obj, buf);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+static int32_t mm_camera_cfg_enqueue_buf(mm_camera_t * camera,
+ mm_camera_reg_buf_t *buf)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ uint32_t tmp;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_enqueue_buf(my_obj, buf);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+static int32_t mm_camera_cfg_prepare_buf(mm_camera_t * camera,
+ mm_camera_reg_buf_t *buf)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ uint32_t tmp;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_prepare_buf(my_obj, buf);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+static int32_t mm_camera_cfg_unprepare_buf(mm_camera_t * camera,
+ mm_camera_channel_type_t ch_type)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ uint32_t tmp;
+ mm_camera_obj_t * my_obj = NULL;
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_unprepare_buf(my_obj,ch_type);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+static mm_camera_config_t mm_camera_cfg = {
+ .is_parm_supported = mm_camera_cfg_is_parm_supported,
+ .is_ch_supported = mm_camera_cfg_is_ch_supported,
+ .set_parm = mm_camera_cfg_set_parm,
+ .get_parm = mm_camera_cfg_get_parm,
+ .request_buf = mm_camera_cfg_request_buf,
+ .enqueue_buf = mm_camera_cfg_enqueue_buf,
+ .prepare_buf = mm_camera_cfg_prepare_buf,
+ .unprepare_buf = mm_camera_cfg_unprepare_buf
+};
+
+static uint8_t mm_camera_ops_is_op_supported (mm_camera_t * camera,
+ mm_camera_ops_type_t opcode)
+{
+ uint8_t is_ops_supported;
+ mm_camera_obj_t * my_obj = NULL;
+ int index = 0;
+ mm_camera_legacy_ops_type_t legacy_opcode = CAMERA_OPS_MAX;
+
+ /* Temp: We will be translating our new opcode
+ to legacy ops type. This is just a hack to
+ temporarily unblock APT team. New design is
+ under discussion */
+ switch (opcode) {
+ case MM_CAMERA_OPS_PREVIEW:
+ legacy_opcode = CAMERA_OPS_STREAMING_PREVIEW;
+ break;
+ case MM_CAMERA_OPS_VIDEO:
+ legacy_opcode = CAMERA_OPS_STREAMING_VIDEO;
+ break;
+ case MM_CAMERA_OPS_PREPARE_SNAPSHOT:
+ legacy_opcode = CAMERA_OPS_PREPARE_SNAPSHOT;
+ break;
+ case MM_CAMERA_OPS_SNAPSHOT:
+ legacy_opcode = CAMERA_OPS_SNAPSHOT;
+ break;
+ case MM_CAMERA_OPS_RAW:
+ legacy_opcode = CAMERA_OPS_RAW_CAPTURE;
+ break;
+ case MM_CAMERA_OPS_ZSL:
+ legacy_opcode = CAMERA_OPS_STREAMING_ZSL;
+ break;
+ case MM_CAMERA_OPS_FOCUS:
+ legacy_opcode = CAMERA_OPS_FOCUS;
+ break;
+ case MM_CAMERA_OPS_GET_BUFFERED_FRAME:
+ legacy_opcode = CAMERA_OPS_LOCAL;
+ is_ops_supported = TRUE;
+ CDBG("MM_CAMERA_OPS_GET_BUFFERED_FRAME not handled");
+ break;
+ default:
+ CDBG_ERROR("%s: case %d not handled", __func__, opcode);
+ legacy_opcode = CAMERA_OPS_LOCAL;
+ is_ops_supported = FALSE;
+ break;
+ }
+ if (legacy_opcode != CAMERA_OPS_LOCAL) {
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ index = legacy_opcode/32; /* 32 bits */
+ is_ops_supported = ((my_obj->properties.ops[index] &
+ (1<<legacy_opcode)) != 0);
+ pthread_mutex_unlock(&my_obj->mutex);
+ } else {
+ is_ops_supported = FALSE;
+ }
+ }
+
+ return is_ops_supported;
+}
+
+static int32_t mm_camera_ops_action (mm_camera_t * camera, uint8_t start,
+ mm_camera_ops_type_t opcode, void *val)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ mm_camera_obj_t * my_obj = NULL;
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_action(my_obj, start, opcode, val);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+/* open uses flags to optionally disable jpeg/vpe interface. */
+static int32_t mm_camera_ops_open (mm_camera_t * camera,
+ mm_camera_op_mode_type_t op_mode)
+{
+ int8_t camera_id = camera->camera_info.camera_id;
+ int32_t rc = MM_CAMERA_OK;
+
+ CDBG("%s: BEGIN\n", __func__);
+ pthread_mutex_lock(&g_mutex);
+ /* not first open */
+ if(g_cam_ctrl.cam_obj[camera_id]) {
+ g_cam_ctrl.cam_obj[camera_id]->ref_count++;
+ CDBG("%s: opened alreadyn", __func__);
+ goto end;
+ }
+ g_cam_ctrl.cam_obj[camera_id] =
+ (mm_camera_obj_t *)malloc(sizeof(mm_camera_obj_t));
+ if(!g_cam_ctrl.cam_obj[camera_id]) {
+ rc = -MM_CAMERA_E_NO_MEMORY;
+ CDBG("%s: no mem", __func__);
+ goto end;
+ }
+ memset(g_cam_ctrl.cam_obj[camera_id], 0,
+ sizeof(mm_camera_obj_t));
+ //g_cam_ctrl.cam_obj[camera_id]->ctrl_fd = -1;
+ g_cam_ctrl.cam_obj[camera_id]->ref_count++;
+ g_cam_ctrl.cam_obj[camera_id]->my_id=camera_id;
+
+ pthread_mutex_init(&g_cam_ctrl.cam_obj[camera_id]->mutex, NULL);
+ rc = mm_camera_open(g_cam_ctrl.cam_obj[camera_id], op_mode);
+ if(rc < 0) {
+ CDBG("%s: open failed, rc = %d\n", __func__, rc);
+ pthread_mutex_destroy(&g_cam_ctrl.cam_obj[camera_id]->mutex);
+ g_cam_ctrl.cam_obj[camera_id]->ref_count--;
+ free(g_cam_ctrl.cam_obj[camera_id]);
+ g_cam_ctrl.cam_obj[camera_id]=NULL;
+ CDBG("%s: mm_camera_open err = %d", __func__, rc);
+ goto end;
+ }else{
+ CDBG("%s: open succeded\n", __func__);
+ }
+end:
+ pthread_mutex_unlock(&g_mutex);
+ CDBG("%s: END, rc=%d\n", __func__, rc);
+ return rc;
+}
+
+static void mm_camera_ops_close (mm_camera_t * camera)
+{
+ mm_camera_obj_t * my_obj;
+ int i;
+ int8_t camera_id = camera->camera_info.camera_id;
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera_id];
+ if(my_obj) {
+ my_obj->ref_count--;
+ if(my_obj->ref_count > 0) {
+ CDBG("%s: ref_count=%d\n", __func__, my_obj->ref_count);
+ } else {
+ mm_camera_poll_thread_release(my_obj, MM_CAMERA_CH_MAX);
+ (void)mm_camera_close(g_cam_ctrl.cam_obj[camera_id]);
+ pthread_mutex_destroy(&my_obj->mutex);
+ free(my_obj);
+ g_cam_ctrl.cam_obj[camera_id] = NULL;
+ }
+ }
+ pthread_mutex_unlock(&g_mutex);
+}
+
+static int32_t mm_camera_ops_ch_acquire(mm_camera_t * camera,
+ mm_camera_channel_type_t ch_type)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ mm_camera_obj_t * my_obj = NULL;
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_ch_acquire(my_obj, ch_type);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+
+}
+static void mm_camera_ops_ch_release(mm_camera_t * camera, mm_camera_channel_type_t ch_type)
+{
+ mm_camera_obj_t * my_obj = NULL;
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ mm_camera_ch_release(my_obj, ch_type);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+}
+
+static int32_t mm_camera_ops_ch_attr(mm_camera_t * camera,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_channel_attr_t *attr)
+{
+ mm_camera_obj_t * my_obj = NULL;
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_ch_fn(my_obj, ch_type, MM_CAMERA_STATE_EVT_ATTR,
+ (void *)attr);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+static int32_t mm_camera_ops_sendmsg(mm_camera_t * camera,
+ void *msg,
+ uint32_t buf_size,
+ int sendfd)
+{
+ int32_t rc = -MM_CAMERA_E_GENERAL;
+ mm_camera_obj_t * my_obj = NULL;
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_sendmsg(my_obj, msg, buf_size, sendfd);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+static mm_camera_ops_t mm_camera_ops = {
+ .is_op_supported = mm_camera_ops_is_op_supported,
+ .action = mm_camera_ops_action,
+ .open = mm_camera_ops_open,
+ .close = mm_camera_ops_close,
+ .ch_acquire = mm_camera_ops_ch_acquire,
+ .ch_release = mm_camera_ops_ch_release,
+ .ch_set_attr = mm_camera_ops_ch_attr,
+ .sendmsg = mm_camera_ops_sendmsg
+};
+
+static uint8_t mm_camera_notify_is_event_supported(mm_camera_t * camera,
+ mm_camera_event_type_t evt_type)
+{
+ switch(evt_type) {
+ case MM_CAMERA_EVT_TYPE_CH:
+ case MM_CAMERA_EVT_TYPE_CTRL:
+ case MM_CAMERA_EVT_TYPE_STATS:
+ case MM_CAMERA_EVT_TYPE_INFO:
+ return 1;
+ default:
+ return 0;
+ }
+ return 0;
+}
+
+static int32_t mm_camera_notify_register_event_cb(mm_camera_t * camera,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data,
+ mm_camera_event_type_t evt_type)
+{
+ mm_camera_obj_t * my_obj = NULL;
+ mm_camera_buf_cb_t reg ;
+ int rc = -1;
+
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_reg_event(my_obj, evt_cb, user_data, evt_type);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+
+static int32_t mm_camera_register_buf_notify (
+ mm_camera_t * camera,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_buf_notify_t buf_cb,
+ mm_camera_register_buf_cb_type_t cb_type,
+ uint32_t cb_count,
+ void * user_data)
+{
+ mm_camera_obj_t * my_obj = NULL;
+ mm_camera_buf_cb_t reg ;
+ int rc = -1;
+
+ reg.cb = buf_cb;
+ reg.user_data = user_data;
+ reg.cb_type=cb_type;
+ reg.cb_count=cb_count;
+ pthread_mutex_lock(&g_mutex);
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ pthread_mutex_unlock(&g_mutex);
+ if(my_obj) {
+ pthread_mutex_lock(&my_obj->mutex);
+ rc = mm_camera_ch_fn(my_obj,ch_type,
+ MM_CAMERA_STATE_EVT_REG_BUF_CB, (void *)®);
+ pthread_mutex_unlock(&my_obj->mutex);
+ }
+ return rc;
+}
+static int32_t mm_camera_buf_done(mm_camera_t * camera, mm_camera_ch_data_buf_t * bufs)
+{
+ mm_camera_obj_t * my_obj = NULL;
+ int rc = -1;
+ my_obj = g_cam_ctrl.cam_obj[camera->camera_info.camera_id];
+ if(my_obj) {
+ /*pthread_mutex_lock(&my_obj->mutex);*/
+ rc = mm_camera_ch_fn(my_obj, bufs->type,
+ MM_CAMERA_STATE_EVT_QBUF, (void *)bufs);
+ /*pthread_mutex_unlock(&my_obj->mutex);*/
+ }
+ return rc;
+}
+
+static mm_camera_notify_t mm_camera_notify = {
+ .is_event_supported = mm_camera_notify_is_event_supported,
+ .register_event_notify = mm_camera_notify_register_event_cb,
+ .register_buf_notify = mm_camera_register_buf_notify,
+ .buf_done = mm_camera_buf_done
+};
+
+static uint8_t mm_camera_jpeg_is_jpeg_supported (mm_camera_t * camera)
+{
+ return FALSE;
+}
+static int32_t mm_camera_jpeg_set_parm (mm_camera_t * camera,
+ mm_camera_jpeg_parm_type_t parm_type,
+ void* p_value)
+{
+ return -1;
+}
+static int32_t mm_camera_jpeg_get_parm (mm_camera_t * camera,
+ mm_camera_jpeg_parm_type_t parm_type,
+ void* p_value)
+{
+ return -1;
+}
+static int32_t mm_camera_jpeg_register_event_cb(mm_camera_t * camera,
+ mm_camera_jpeg_cb_t * evt_cb,
+ void * user_data)
+{
+ return -1;
+}
+static int32_t mm_camera_jpeg_encode (mm_camera_t * camera, uint8_t start,
+ mm_camera_jpeg_encode_t *data)
+{
+ return -1;
+}
+
+static mm_camera_jpeg_t mm_camera_jpeg = {
+ .is_jpeg_supported = mm_camera_jpeg_is_jpeg_supported,
+ .set_parm = mm_camera_jpeg_set_parm,
+ .get_parm = mm_camera_jpeg_get_parm,
+ .register_event_cb = mm_camera_jpeg_register_event_cb,
+ .encode = mm_camera_jpeg_encode,
+};
+
+extern mm_camera_t * mm_camera_query (uint8_t *num_cameras)
+{
+ int i = 0, rc = MM_CAMERA_OK;
+ int dev_fd = 0;
+ struct media_device_info mdev_info;
+ int num_media_devices = 0;
+ if (!num_cameras)
+ return NULL;
+ /* lock the mutex */
+ pthread_mutex_lock(&g_mutex);
+ *num_cameras = 0;
+ while (1) {
+ char dev_name[32];
+ snprintf(dev_name, sizeof(dev_name), "/dev/media%d", num_media_devices);
+ dev_fd = open(dev_name, O_RDWR | O_NONBLOCK);
+ if (dev_fd < 0) {
+ CDBG("Done discovering media devices\n");
+ break;
+ }
+ num_media_devices++;
+ rc = ioctl(dev_fd, MEDIA_IOC_DEVICE_INFO, &mdev_info);
+ if (rc < 0) {
+ CDBG_ERROR("Error: ioctl media_dev failed: %s\n", strerror(errno));
+ close(dev_fd);
+ break;
+ }
+
+ if(strncmp(mdev_info.model, QCAMERA_NAME, sizeof(mdev_info.model) != 0)) {
+ close(dev_fd);
+ continue;
+ }
+
+ char * mdev_cfg;
+ int cam_type = 0, mount_angle = 0, info_index = 0;
+ mdev_cfg = strtok(mdev_info.serial, "-");
+ while(mdev_cfg != NULL) {
+ if(info_index == 0) {
+ if(strcmp(mdev_cfg, QCAMERA_NAME))
+ break;
+ } else if(info_index == 1) {
+ mount_angle = atoi(mdev_cfg);
+ } else if(info_index == 2) {
+ cam_type = atoi(mdev_cfg);
+ }
+ mdev_cfg = strtok(NULL, "-");
+ info_index++;
+ }
+
+ if(info_index == 0) {
+ close(dev_fd);
+ continue;
+ }
+
+ int num_entities = 1;
+ while (1) {
+ struct media_entity_desc entity;
+ memset(&entity, 0, sizeof(entity));
+ entity.id = num_entities++;
+ rc = ioctl(dev_fd, MEDIA_IOC_ENUM_ENTITIES, &entity);
+ if (rc < 0) {
+ CDBG("Done enumerating media entities\n");
+ rc = 0;
+ break;
+ }
+ if(entity.type == MEDIA_ENT_T_DEVNODE_V4L && entity.group_id == QCAMERA_VNODE_GROUP_ID) {
+ strncpy(g_cam_ctrl.camera[*num_cameras].video_dev_name,
+ entity.name, sizeof(entity.name));
+ break;
+ }
+ }
+
+ g_cam_ctrl.camera[*num_cameras].camera_info.camera_id = *num_cameras;
+
+ g_cam_ctrl.camera[*num_cameras].
+ camera_info.modes_supported = CAMERA_MODE_2D;
+ if(cam_type > 1)
+ g_cam_ctrl.camera[*num_cameras].
+ camera_info.modes_supported |= CAMERA_MODE_3D;
+
+ g_cam_ctrl.camera[*num_cameras].camera_info.position =
+ (cam_type == 1) ? FRONT_CAMERA : BACK_CAMERA;
+ g_cam_ctrl.camera[*num_cameras].camera_info.sensor_mount_angle =
+ mount_angle;
+ g_cam_ctrl.camera[*num_cameras].sensor_type = 0;
+ g_cam_ctrl.camera[*num_cameras].cfg = &mm_camera_cfg;
+ g_cam_ctrl.camera[*num_cameras].ops = &mm_camera_ops;
+ g_cam_ctrl.camera[*num_cameras].evt = &mm_camera_notify;
+ g_cam_ctrl.camera[*num_cameras].jpeg_ops = NULL;
+
+ CDBG("%s: dev_info[id=%d,name='%s',pos=%d,modes=0x%x,sensor=%d]\n",
+ __func__, *num_cameras,
+ g_cam_ctrl.camera[*num_cameras].video_dev_name,
+ g_cam_ctrl.camera[*num_cameras].camera_info.position,
+ g_cam_ctrl.camera[*num_cameras].camera_info.modes_supported,
+ g_cam_ctrl.camera[*num_cameras].sensor_type);
+
+ *num_cameras+=1;
+ if (dev_fd > 0) {
+ close(dev_fd);
+ }
+ }
+ *num_cameras = *num_cameras;
+ g_cam_ctrl.num_cam = *num_cameras;
+end:
+ /* unlock the mutex */
+ pthread_mutex_unlock(&g_mutex);
+ CDBG("%s: num_cameras=%d\n", __func__, g_cam_ctrl.num_cam);
+ if(rc == 0)
+ return &g_cam_ctrl.camera[0];
+ else
+ return NULL;
+}
+
+
+/* Map a logical camera id to its entry in the global g_cam_ctrl.camera[]
+ * table. Returns NULL when cam_id is outside [0, g_cam_ctrl.num_cam). */
+static mm_camera_t * get_camera_by_id( int cam_id)
+{
+ mm_camera_t * mm_cam;
+ if( cam_id < 0 || cam_id >= g_cam_ctrl.num_cam) {
+ mm_cam = NULL;
+ } else {
+ mm_cam = & g_cam_ctrl.camera[cam_id];
+ }
+ return mm_cam;
+}
+
+/*configure methods*/
+/* Query whether parm_type is supported on camera cam_id.
+ * Returns 0 (not supported) when cam_id is invalid or cfg is unset. */
+uint8_t cam_config_is_parm_supported(
+ int cam_id,
+ mm_camera_parm_type_t parm_type)
+{
+ uint8_t rc = 0;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam && mm_cam->cfg) {
+ rc = mm_cam->cfg->is_parm_supported(mm_cam, parm_type);
+ }
+ return rc;
+}
+
+/* Query whether channel ch_type is supported on camera cam_id.
+ * NOTE(review): unlike cam_config_is_parm_supported above, this wrapper
+ * does not NULL-check mm_cam->cfg before dereferencing it. */
+uint8_t cam_config_is_ch_supported(
+ int cam_id,
+ mm_camera_channel_type_t ch_type)
+{
+ uint8_t rc = 0;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->cfg->is_ch_supported(mm_cam, ch_type);
+ }
+ return rc;
+
+}
+
+/* set a parms current value */
+/* Each wrapper below resolves cam_id via get_camera_by_id() and forwards
+ * to the camera's cfg vtable; all return -1 when cam_id is invalid. */
+int32_t cam_config_set_parm(
+ int cam_id,
+ mm_camera_parm_type_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->cfg->set_parm(mm_cam, parm_type, p_value);
+ }
+ return rc;
+}
+
+/* get a parms current value */
+int32_t cam_config_get_parm(
+ int cam_id,
+ mm_camera_parm_type_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->cfg->get_parm(mm_cam, parm_type, p_value);
+ }
+ return rc;
+}
+
+/* Ask the driver to allocate/reserve buffers described by buf. */
+int32_t cam_config_request_buf(int cam_id, mm_camera_reg_buf_t *buf)
+{
+
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->cfg->request_buf(mm_cam, buf);
+ }
+ return rc;
+}
+
+/* Queue buffers described by buf back to the driver. */
+int32_t cam_config_enqueue_buf(int cam_id, mm_camera_reg_buf_t *buf)
+{
+
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->cfg->enqueue_buf(mm_cam, buf);
+ }
+ return rc;
+}
+
+/* Register/prepare the buffers in buf for streaming. */
+int32_t cam_config_prepare_buf(int cam_id, mm_camera_reg_buf_t *buf)
+{
+
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->cfg->prepare_buf(mm_cam, buf);
+ }
+ return rc;
+}
+/* Release buffers previously prepared for channel ch_type. */
+int32_t cam_config_unprepare_buf(int cam_id, mm_camera_channel_type_t ch_type)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->cfg->unprepare_buf(mm_cam, ch_type);
+ }
+ return rc;
+}
+
+/*operation methods*/
+/* The cam_ops_* wrappers forward to the camera's ops vtable.
+ * int32_t variants return -1 on an invalid cam_id; the uint8_t
+ * capability query returns 0; void variants silently no-op. */
+uint8_t cam_ops_is_op_supported(int cam_id, mm_camera_ops_type_t opcode)
+{
+ uint8_t rc = 0;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->ops->is_op_supported(mm_cam, opcode);
+ }
+ return rc;
+}
+/* val is reserved for some action such as MM_CAMERA_OPS_FOCUS */
+/* Start (start=1) or stop (start=0) the operation named by opcode. */
+int32_t cam_ops_action(int cam_id, uint8_t start,
+ mm_camera_ops_type_t opcode, void *val)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->ops->action(mm_cam, start, opcode, val);
+ }
+ return rc;
+}
+
+/* Open the camera in the given operating mode (capture/video/ZSL). */
+int32_t cam_ops_open(int cam_id, mm_camera_op_mode_type_t op_mode)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->ops->open(mm_cam, op_mode);
+ }
+ return rc;
+}
+
+/* Close the camera; no-op if cam_id is invalid. */
+void cam_ops_close(int cam_id)
+{
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ mm_cam->ops->close(mm_cam);
+ }
+}
+
+/* Acquire the given channel for this camera. */
+int32_t cam_ops_ch_acquire(int cam_id, mm_camera_channel_type_t ch_type)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->ops->ch_acquire(mm_cam, ch_type);
+ }
+ return rc;
+}
+
+/* Release a previously acquired channel; no-op on invalid cam_id. */
+void cam_ops_ch_release(int cam_id, mm_camera_channel_type_t ch_type)
+{
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ mm_cam->ops->ch_release(mm_cam, ch_type);
+ }
+}
+
+/* Set per-channel attributes (raw streaming mode / frame buffering). */
+int32_t cam_ops_ch_set_attr(int cam_id, mm_camera_channel_type_t ch_type,
+ mm_camera_channel_attr_t *attr)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->ops->ch_set_attr(mm_cam, ch_type, attr);
+ }
+ return rc;
+}
+
+/* Send a message (optionally with a file descriptor) to the camera. */
+int32_t cam_ops_sendmsg(int cam_id, void *msg, uint32_t buf_size, int sendfd)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->ops->sendmsg(mm_cam, msg, buf_size, sendfd);
+ }
+ return rc;
+}
+
+/*call-back notify methods*/
+/* The cam_evt_* wrappers forward to the camera's evt vtable;
+ * they return 0 / -1 respectively when cam_id is invalid. */
+uint8_t cam_evt_is_event_supported(int cam_id, mm_camera_event_type_t evt_type)
+{
+ uint8_t rc = 0;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->evt->is_event_supported(mm_cam, evt_type);
+ }
+ return rc;
+}
+
+/* Register evt_cb to be invoked (with user_data) for events of evt_type. */
+int32_t cam_evt_register_event_notify(int cam_id,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data,
+ mm_camera_event_type_t evt_type)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->evt->register_event_notify(
+ mm_cam, evt_cb, user_data, evt_type);
+ }
+ return rc;
+}
+
+/* Register buf_cb for frame delivery on channel ch_type; cb_type/cb_count
+ * control whether the callback fires once, cb_count times, or forever. */
+int32_t cam_evt_register_buf_notify(int cam_id,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_buf_notify_t buf_cb,
+ mm_camera_register_buf_cb_type_t cb_type,
+ uint32_t cb_count,
+ void * user_data)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->evt->register_buf_notify(
+ mm_cam, ch_type, buf_cb, cb_type,
+ cb_count, user_data);
+ }
+ return rc;
+}
+
+/* Return processed frame buffers to the driver. */
+int32_t cam_evt_buf_done(int cam_id, mm_camera_ch_data_buf_t *bufs)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->evt->buf_done(mm_cam, bufs);
+ }
+ return rc;
+}
+
+/*camera JPEG methods*/
+/* NOTE(review): mm_camera_query() above initializes jpeg_ops to NULL for
+ * every discovered camera, yet these wrappers dereference
+ * mm_cam->jpeg_ops without a NULL check — verify jpeg_ops is populated
+ * elsewhere before these are callable. */
+uint8_t cam_jpeg_is_jpeg_supported(int cam_id)
+{
+ uint8_t rc = 0;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->jpeg_ops->is_jpeg_supported(mm_cam);
+ }
+ return rc;
+}
+
+/* Set a JPEG encode parameter (rotation / main / thumb quality). */
+int32_t cam_jpeg_set_parm(int cam_id, mm_camera_jpeg_parm_type_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->jpeg_ops->set_parm(mm_cam, parm_type, p_value);
+ }
+ return rc;
+}
+
+/* Read back a JPEG encode parameter into *p_value. */
+int32_t cam_jpeg_get_parm(int cam_id, mm_camera_jpeg_parm_type_t parm_type,
+ void* p_value)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->jpeg_ops->get_parm(mm_cam, parm_type, p_value);
+ }
+ return rc;
+}
+/* Register evt_cb to receive JPEG-encode completion results. */
+int32_t cam_jpeg_register_event_cb(int cam_id, mm_camera_jpeg_cb_t * evt_cb,
+ void * user_data)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->jpeg_ops->register_event_cb(mm_cam, evt_cb, user_data);
+ }
+ return rc;
+}
+/* Start (start=1) or stop (start=0) JPEG encoding of data. */
+int32_t cam_jpeg_encode(int cam_id, uint8_t start,
+ mm_camera_jpeg_encode_t *data)
+{
+ int32_t rc = -1;
+ mm_camera_t * mm_cam = get_camera_by_id(cam_id);
+ if (mm_cam) {
+ rc = mm_cam->jpeg_ops->encode(mm_cam, start, data);
+ }
+ return rc;
+}
+
diff --git a/camera/mm-camera-interface/mm_camera_interface2.h b/camera/mm-camera-interface/mm_camera_interface2.h
new file mode 100644
index 0000000..e674d79
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_interface2.h
@@ -0,0 +1,528 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_INTERFACE2_H__
+#define __MM_CAMERA_INTERFACE2_H__
+#include <linux/msm_ion.h>
+#include <linux/videodev2.h>
+#include <media/msm_camera.h>
+#include "QCamera_Intf.h"
+#include "mm_omx_jpeg_encoder.h"
+
+#define MM_CAMERA_MAX_NUM_FRAMES 16
+
+/* A set of stream frames and their common length.
+ * NOTE(review): "cameara" is a typo carried in the public type name;
+ * renaming would break existing users of this header. */
+typedef struct {
+ int num;
+ uint32_t frame_len;
+ struct msm_frame frame[MM_CAMERA_MAX_NUM_FRAMES];
+} mm_cameara_stream_buf_t;
+
+/* Width/height pair used for stream and image dimensions. */
+typedef struct {
+ int32_t width;
+ int32_t height;
+}mm_camera_dimension_t;
+
+/* Status codes returned through mm_camera_jpeg_encode_t and friends. */
+typedef enum {
+ MM_CAMERA_OK,
+ MM_CAMERA_E_GENERAL,
+ MM_CAMERA_E_NO_MEMORY,
+ MM_CAMERA_E_NOT_SUPPORTED,
+ MM_CAMERA_E_INVALID_INPUT,
+ MM_CAMERA_E_INVALID_OPERATION, /* 5 */
+ MM_CAMERA_E_ENCODE,
+ MM_CAMERA_E_BUFFER_REG,
+ MM_CAMERA_E_PMEM_ALLOC,
+ MM_CAMERA_E_CAPTURE_FAILED,
+ MM_CAMERA_E_CAPTURE_TIMEOUT, /* 10 */
+}mm_camera_status_type_t;
+
+/* Operating mode passed to ops->open() / cam_ops_open(). */
+typedef enum {
+ MM_CAMERA_OP_MODE_NOTUSED,
+ MM_CAMERA_OP_MODE_CAPTURE,
+ MM_CAMERA_OP_MODE_VIDEO,
+ MM_CAMERA_OP_MODE_ZSL,
+ MM_CAMERA_OP_MODE_MAX
+}mm_camera_op_mode_type_t;
+
+/* Bitmask describing whether a parameter supports set, get, or both. */
+#define MM_CAMERA_PARM_SUPPORT_SET 0x01
+#define MM_CAMERA_PARM_SUPPORT_GET 0x02
+#define MM_CAMERA_PARM_SUPPORT_BOTH 0x03
+
+/* Generic rectangle (crop windows, ROI). */
+typedef struct {
+ int32_t left;
+ int32_t top;
+ int32_t width;
+ int32_t height;
+} mm_camera_rect_t;
+
+/* NOTE(review): two white-balance enums exist in this header
+ * (White_Balance_modes here and mm_camera_white_balance_mode_type_t
+ * below) with conflicting numeric values for the same modes — confirm
+ * which one each caller expects before mixing them. */
+typedef enum {
+ WHITE_BALANCE_AUTO = 1,
+ WHITE_BALANCE_INCANDESCENT = 3,
+ WHITE_BALANCE_FLUORESCENT = 4,
+ WHITE_BALANCE_DAYLIGHT = 5,
+ WHITE_BALANCE_CLOUDY = 6,
+ WHITE_BALANCE_OFF = 9,
+} White_Balance_modes;
+
+/* Logical data channels a camera exposes. */
+typedef enum {
+ MM_CAMERA_CH_PREVIEW,
+ MM_CAMERA_CH_VIDEO,
+ MM_CAMERA_CH_SNAPSHOT,
+ MM_CAMERA_CH_RAW,
+ MM_CAMERA_CH_MAX
+} mm_camera_channel_type_t;
+
+typedef enum {
+ MM_CAMERA_WHITE_BALANCE_AUTO = 1,
+ MM_CAMERA_WHITE_BALANCE_OFF = 2,
+ MM_CAMERA_WHITE_BALANCE_DAYLIGHT = 3,
+ MM_CAMERA_WHITE_BALANCE_INCANDESCENT = 4,
+ MM_CAMERA_WHITE_BALANCE_FLUORESCENT = 5,
+} mm_camera_white_balance_mode_type_t;
+/* MM_CAMERA_PARM_RAW_IMAGE_FMT */
+/* Pixel format plus dimensions for one image plane/stream. */
+typedef struct {
+ cam_format_t fmt;
+ mm_camera_dimension_t dim;
+} mm_camera_image_fmt_t;
+
+/* Snapshot needs two formats: full-size main image and thumbnail. */
+typedef struct {
+ mm_camera_image_fmt_t main;
+ mm_camera_image_fmt_t thumbnail;
+} mm_camera_ch_image_fmt_snapshot_t;
+
+typedef enum {
+ MM_CAMERA_RAW_STREAMING_CAPTURE_SINGLE,
+ MM_CAMERA_RAW_STREAMING_MAX
+} mm_camera_raw_streaming_type_t;
+
+/* Video mode carries a main (snapshot-capable) and a video format. */
+typedef struct {
+ mm_camera_image_fmt_t main;
+ mm_camera_image_fmt_t video;
+} mm_camera_ch_image_fmt_video_t;
+
+/* Per-channel image format parameter; union member selected by ch_type. */
+typedef struct {
+ mm_camera_channel_type_t ch_type;
+ union {
+ mm_camera_image_fmt_t def;
+ mm_camera_ch_image_fmt_snapshot_t snapshot;
+ mm_camera_ch_image_fmt_video_t video;
+ };
+} mm_camera_ch_image_fmt_parm_t;
+typedef struct {
+ mm_camera_rect_t main_crop;
+ mm_camera_rect_t thumbnail_crop;
+} mm_camera_crop_snapshot_t;
+
+/* Per-channel crop parameter; union member selected by ch_type. */
+typedef struct {
+ mm_camera_channel_type_t ch_type;
+ union {
+ mm_camera_rect_t crop;
+ mm_camera_crop_snapshot_t snapshot;
+ };
+} mm_camera_ch_crop_t;
+
+/* Capability range for a named control (e.g. sharpness). */
+typedef struct {
+ uint8_t name[32];
+ int32_t min_value;
+ int32_t max_value;
+ int32_t step;
+ int32_t default_value;
+} mm_camera_ctrl_cap_sharpness_t;
+
+/* Table of supported zoom ratios; size = number of entries. */
+typedef struct {
+ int16_t *zoom_ratio_tbl;
+ int32_t size;
+} mm_camera_zoom_tbl_t;
+
+#define MM_CAMERA_MAX_FRAME_NUM 16
+
+/* Single-plane buffer description: parallel arrays of offsets/frames. */
+typedef struct {
+ uint32_t *frame_offset;
+ struct msm_frame *frame;
+} mm_camera_sp_buf_t;
+
+/* Multi-plane buffer description (V4L2 multiplanar). */
+typedef struct {
+ int8_t num_planes;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+ uint32_t frame_offset;
+ struct msm_frame frame;
+ int idx; /* index to stream frame */
+} mm_camera_mp_buf_t;
+
+/* num buffers, stored either single-plane (sp) or multi-plane (mp). */
+typedef struct {
+ int8_t num;
+ union {
+ mm_camera_sp_buf_t sp;
+ mm_camera_mp_buf_t *mp;
+ }buf;
+} mm_camera_buf_def_t;
+
+typedef struct {
+ mm_camera_buf_def_t thumbnail;
+ mm_camera_buf_def_t main;
+} mm_camera_buf_snapshot_t;
+
+typedef struct {
+ mm_camera_buf_def_t video;
+ mm_camera_buf_def_t main;
+} mm_camera_buf_video_t;
+
+/* Buffer-registration request passed to cfg->request/enqueue/prepare_buf;
+ * union member selected by ch_type. */
+typedef struct {
+ mm_camera_channel_type_t ch_type;
+ union {
+ mm_camera_buf_def_t def;
+ mm_camera_buf_def_t preview;
+ mm_camera_buf_snapshot_t snapshot;
+ mm_camera_buf_video_t video;
+ };
+} mm_camera_reg_buf_t;
+
+/* Opcodes for ops->action() / cam_ops_action(). */
+typedef enum {
+ MM_CAMERA_OPS_PREVIEW, // start/stop preview
+ MM_CAMERA_OPS_VIDEO, // start/stop video
+ MM_CAMERA_OPS_PREPARE_SNAPSHOT, // prepare capture in capture mode
+ MM_CAMERA_OPS_SNAPSHOT, // take snapshot (HDR,ZSL,live shot)
+ MM_CAMERA_OPS_RAW, // take raw streaming (raw snapshot, etc)
+ MM_CAMERA_OPS_ZSL, // start/stop zsl
+ // mm_camera_ops_parm_get_buffered_frame_t is used for MM_CAMERA_OPS_GET_BUFFERED_FRAME
+ MM_CAMERA_OPS_GET_BUFFERED_FRAME, // channel to dispatch buffered frame to app through call back
+ MM_CAMERA_OPS_FOCUS, // change focus,isp3a_af_mode_t* used in val
+ MM_CAMERA_OPS_MAX // max ops
+}mm_camera_ops_type_t;
+
+/* Temp: We are declaring it here so that we can still use the
+ legacy GET_CAPABILITIES call to config thread. A new design
+ to query capabilities based on V4L2 interface is being
+ discussed. */
+typedef enum {
+ CAMERA_OPS_LOCAL = -1, /*no need to query mm-camera*/
+ CAMERA_OPS_STREAMING_PREVIEW = 0,
+ CAMERA_OPS_STREAMING_ZSL,
+ CAMERA_OPS_STREAMING_VIDEO,
+ CAMERA_OPS_CAPTURE, /*not supported*/
+ CAMERA_OPS_FOCUS,
+ CAMERA_OPS_GET_PICTURE, /*5*/
+ CAMERA_OPS_PREPARE_SNAPSHOT,
+ CAMERA_OPS_SNAPSHOT,
+ CAMERA_OPS_LIVESHOT,
+ CAMERA_OPS_RAW_SNAPSHOT,
+ CAMERA_OPS_VIDEO_RECORDING, /*10*/
+ CAMERA_OPS_REGISTER_BUFFER,
+ CAMERA_OPS_UNREGISTER_BUFFER,
+ CAMERA_OPS_CAPTURE_AND_ENCODE,
+ CAMERA_OPS_RAW_CAPTURE,
+ CAMERA_OPS_ENCODE, /*15*/
+ CAMERA_OPS_ZSL_STREAMING_CB,
+ /* add new above*/
+ CAMERA_OPS_MAX
+}mm_camera_legacy_ops_type_t;
+
+/* Selector for the union inside mm_camera_channel_attr_t. */
+typedef enum {
+ MM_CAMERA_CH_ATTR_RAW_STREAMING_TYPE,
+ MM_CAMERA_CH_ATTR_BUFFERING_FRAME,
+ MM_CAMERA_CH_ATTR_MAX
+} mm_camera_channel_attr_type_t;
+
+typedef struct {
+ /* how deep the circular frame queue */
+ int water_mark;
+ int look_back;
+ int interval; /*skipping n-1 frames*/
+} mm_camera_channel_attr_buffering_frame_t;
+
+/* Channel attribute for ops->ch_set_attr(); union selected by type. */
+typedef struct {
+ mm_camera_channel_attr_type_t type;
+ union {
+ /* add more if needed */
+ mm_camera_raw_streaming_type_t raw_streaming_mode;
+ mm_camera_channel_attr_buffering_frame_t buffering_frame;
+ };
+} mm_camera_channel_attr_t;
+
+/* Argument for MM_CAMERA_OPS_GET_BUFFERED_FRAME (see ops enum above). */
+typedef struct {
+ mm_camera_channel_type_t ch_type;
+} mm_camera_ops_parm_get_buffered_frame_t;
+
+/* Forward declaration; the full struct is defined near the bottom of
+ * this header. */
+typedef struct mm_camera mm_camera_t;
+
+/* Config vtable: parameter get/set and buffer management entry points.
+ * Implemented once (mm_camera_cfg) and shared by all cameras. */
+typedef struct {
+ /* if the parm is supported */
+ uint8_t (*is_parm_supported)(mm_camera_t *camera, mm_camera_parm_type_t parm_type);
+ /* if the channel is supported */
+ uint8_t (*is_ch_supported)(mm_camera_t *camera, mm_camera_channel_type_t ch_type);
+ /* set a parms current value */
+ int32_t (*set_parm)(mm_camera_t *camera, mm_camera_parm_type_t parm_type,
+ void* p_value);
+ /* get a parms current value */
+ int32_t (*get_parm)(mm_camera_t *camera, mm_camera_parm_type_t parm_type,
+ void* p_value);
+ int32_t (*request_buf) (mm_camera_t *camera, mm_camera_reg_buf_t *buf);
+ int32_t (*enqueue_buf) (mm_camera_t *camera, mm_camera_reg_buf_t *buf);
+ int32_t (*prepare_buf) (mm_camera_t *camera, mm_camera_reg_buf_t *buf);
+ int32_t (*unprepare_buf) (mm_camera_t *camera, mm_camera_channel_type_t ch_type);
+} mm_camera_config_t;
+
+/* Operations vtable: open/close, channel lifecycle, and actions. */
+typedef struct {
+ uint8_t (*is_op_supported)(mm_camera_t * camera, mm_camera_ops_type_t opcode);
+ /* val is reserved for some action such as MM_CAMERA_OPS_FOCUS */
+ int32_t (*action)(mm_camera_t * camera, uint8_t start,
+ mm_camera_ops_type_t opcode, void *val);
+ int32_t (*open)(mm_camera_t * camera, mm_camera_op_mode_type_t op_mode);
+ void (*close)(mm_camera_t * camera);
+ int32_t (*ch_acquire)(mm_camera_t * camera, mm_camera_channel_type_t ch_type);
+ void (*ch_release)(mm_camera_t * camera, mm_camera_channel_type_t ch_type);
+ int32_t (*ch_set_attr)(mm_camera_t * camera, mm_camera_channel_type_t ch_type,
+ mm_camera_channel_attr_t *attr);
+ int32_t (*sendmsg)(mm_camera_t * camera, void *msg, uint32_t buf_size, int sendfd);
+} mm_camera_ops_t;
+
+/* Generic typed statistics blob delivered with stats events. */
+typedef struct {
+ int type;
+ uint32_t length;
+ void *value;
+} mm_camera_stats_t;
+
+/* One frame handed to a buffer-notify callback; idx is the buffer index. */
+typedef struct {
+ int idx;
+ struct msm_frame *frame;
+} mm_camera_notify_frame_t;
+
+typedef struct {
+ mm_camera_notify_frame_t video;
+ mm_camera_notify_frame_t main;
+} mm_camera_notify_video_buf_t;
+
+typedef struct {
+ mm_camera_notify_frame_t thumbnail;
+ mm_camera_notify_frame_t main;
+} mm_camera_notify_snapshot_buf_t;
+
+/* Frame bundle passed to mm_camera_buf_notify_t and evt->buf_done().
+ * NOTE(review): the union was deliberately commented out, so all three
+ * members coexist — only the one matching 'type' is meaningful. */
+typedef struct {
+ mm_camera_channel_type_t type;
+// union {
+ mm_camera_notify_snapshot_buf_t snapshot;
+ mm_camera_notify_video_buf_t video;
+ mm_camera_notify_frame_t def;
+// };
+} mm_camera_ch_data_buf_t;
+
+
+/* How long a registered buffer callback stays active. */
+typedef enum {
+ MM_CAMERA_REG_BUF_CB_ONCE,
+ MM_CAMERA_REG_BUF_CB_COUNT,
+ MM_CAMERA_REG_BUF_CB_INFINITE
+} mm_camera_register_buf_cb_type_t;
+
+/* Asynchronous event callback signature. */
+typedef void (*mm_camera_event_notify_t)(mm_camera_event_t *evt,
+ void *user_data);
+
+/* Frame-delivery callback signature. */
+typedef void (*mm_camera_buf_notify_t)(mm_camera_ch_data_buf_t *bufs,
+ void *user_data);
+
+/* Notification vtable: event/buffer callback registration and buf_done. */
+typedef struct {
+ uint8_t (*is_event_supported)(mm_camera_t * camera,
+ mm_camera_event_type_t evt_type);
+ int32_t (*register_event_notify)(mm_camera_t * camera,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data,
+ mm_camera_event_type_t evt_type);
+ int32_t (*register_buf_notify)(mm_camera_t * camera,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_buf_notify_t buf_cb,
+ mm_camera_register_buf_cb_type_t cb_type,
+ uint32_t cb_count,
+ void * user_data);
+ int32_t (*buf_done)(mm_camera_t * camera, mm_camera_ch_data_buf_t *bufs);
+} mm_camera_notify_t;
+
+/* JPEG encode parameters settable through the jpeg_ops vtable. */
+typedef enum {
+ MM_CAMERA_JPEG_PARM_ROTATION,
+ MM_CAMERA_JPEG_PARM_MAINIMG_QUALITY,
+ MM_CAMERA_JPEG_PARM_THUMB_QUALITY,
+ MM_CAMERA_JPEG_PARM_MAX
+} mm_camera_jpeg_parm_type_t;
+
+/* One output buffer for the JPEG encoder (fd/offset identify the
+ * underlying ION/pmem allocation). */
+typedef struct {
+ uint8_t* ptr;
+ uint32_t filled_size;
+ uint32_t size;
+ int32_t fd;
+ uint32_t offset;
+}mm_camera_buffer_t;
+
+/* Full parameter set for a JPEG encode job. */
+typedef struct {
+ exif_tags_info_t* exif_data;
+ int exif_numEntries;
+ mm_camera_buffer_t* p_output_buffer;
+ uint8_t buffer_count;
+ uint32_t rotation;
+ uint32_t quality;
+ int y_offset;
+ int cbcr_offset;
+ /* bitmask for the images to be encoded. if capture_and_encode
+ * option is selected, all the images will be encoded irrespective
+ * of bitmask.
+ */
+ uint8_t encodeBitMask;
+ uint32_t output_picture_width;
+ uint32_t output_picture_height;
+ int format3d;
+}encode_params_t;
+
+/* In/out descriptor for jpeg_ops->encode(): two source images and two
+ * result buffers; *_size fields are IN (capacity) / OUT (bytes written). */
+typedef struct {
+ void * src_img1_buf; // input main image buffer
+ uint32_t src_img1_size; // input main image size
+ void * src_img2_buf; // input thumbnail image buffer
+ uint32_t src_img2_size; // input thumbnail image size
+ void* out_jpeg1_buf; // out jpeg buffer
+ uint32_t out_jpeg1_size; // IN/OUT-result buf size/jpeg image size
+ void* out_jpeg2_buf; // out jpeg buffer
+ uint32_t out_jpeg2_size; // IN/OUT-result buf size/jpeg image size
+ mm_camera_status_type_t status; // result status place holder
+} mm_camera_jpeg_encode_t;
+
+/* JPEG completion callback signature. */
+typedef void (*mm_camera_jpeg_cb_t)(mm_camera_jpeg_encode_t *result,
+ void *user_data);
+
+/* JPEG vtable: capability query, parameters, callback, and encode. */
+typedef struct {
+ uint8_t (*is_jpeg_supported)( mm_camera_t * camera);
+ int32_t (*set_parm)(mm_camera_t * camera, mm_camera_jpeg_parm_type_t parm_type,
+ void* p_value);
+ int32_t (*get_parm)(mm_camera_t * camera, mm_camera_jpeg_parm_type_t parm_type,
+ void* p_value);
+ int32_t (* register_event_cb)(mm_camera_t * camera, mm_camera_jpeg_cb_t * evt_cb,
+ void * user_data);
+ int32_t (*encode)(mm_camera_t * camera, uint8_t start,
+ mm_camera_jpeg_encode_t *data);
+} mm_camera_jpeg_t;
+
+/* Per-camera object: four vtables plus static device information.
+ * One entry per discovered camera lives in g_cam_ctrl.camera[]. */
+struct mm_camera {
+ mm_camera_config_t *cfg; // config interface
+ mm_camera_ops_t *ops; // operation interface
+ mm_camera_notify_t *evt; // evt callback interface
+ mm_camera_jpeg_t *jpeg_ops; // jpeg config and encoding interface
+ qcamera_info_t camera_info; // postion, mount_angle, etc.
+ enum sensor_type_t sensor_type; // BAYER, YUV, JPEG_SOC, etc.
+ char video_dev_name[32]; // device node name, e.g. /dev/video1
+};
+
+/* Padding granularity for frame-length calculations. */
+typedef enum {
+ MM_CAMERA_PAD_WORD,
+ MM_CAMERA_PAD_2K,
+ MM_CAMERA_PAD_MAX
+} mm_camera_pad_type_t;
+
+/* Table of default picture sizes. */
+typedef struct
+{
+ struct camera_size_type *sizes_tbl;
+ uint32_t tbl_size;
+}default_sizes_tbl_t;
+
+/*configure methods*/
+/* Public C wrappers implemented in mm_camera_interface2.c; each takes a
+ * logical cam_id instead of an mm_camera_t pointer. */
+uint8_t cam_config_is_parm_supported(
+ int cam_id,
+ mm_camera_parm_type_t parm_type);
+uint8_t cam_config_is_ch_supported(
+ int cam_id,
+ mm_camera_channel_type_t ch_type);
+/* set a parms current value */
+int32_t cam_config_set_parm(
+ int cam_id,
+ mm_camera_parm_type_t parm_type,
+ void* p_value);
+/* get a parms current value */
+int32_t cam_config_get_parm(
+ int cam_id,
+ mm_camera_parm_type_t parm_type,
+ void* p_value);
+int32_t cam_config_request_buf(int cam_id, mm_camera_reg_buf_t *buf);
+int32_t cam_config_enqueue_buf(int cam_id, mm_camera_reg_buf_t *buf);
+int32_t cam_config_prepare_buf(int cam_id, mm_camera_reg_buf_t *buf);
+int32_t cam_config_unprepare_buf(int cam_id, mm_camera_channel_type_t ch_type);
+
+/*operation methods*/
+uint8_t cam_ops_is_op_supported(int cam_id, mm_camera_ops_type_t opcode);
+/* val is reserved for some action such as MM_CAMERA_OPS_FOCUS */
+int32_t cam_ops_action(int cam_id, uint8_t start,
+ mm_camera_ops_type_t opcode, void *val);
+int32_t cam_ops_open(int cam_id, mm_camera_op_mode_type_t op_mode);
+void cam_ops_close(int cam_id);
+int32_t cam_ops_ch_acquire(int cam_id, mm_camera_channel_type_t ch_type);
+void cam_ops_ch_release(int cam_id, mm_camera_channel_type_t ch_type);
+int32_t cam_ops_ch_set_attr(int cam_id, mm_camera_channel_type_t ch_type,
+ mm_camera_channel_attr_t *attr);
+int32_t cam_ops_sendmsg(int cam_id, void *msg, uint32_t buf_size, int sendfd);
+
+/*call-back notify methods*/
+uint8_t cam_evt_is_event_supported(int cam_id, mm_camera_event_type_t evt_type);
+int32_t cam_evt_register_event_notify(int cam_id,
+ mm_camera_event_notify_t evt_cb,
+ void * user_data,
+ mm_camera_event_type_t evt_type);
+int32_t cam_evt_register_buf_notify(int cam_id,
+ mm_camera_channel_type_t ch_type,
+ mm_camera_buf_notify_t buf_cb,
+ mm_camera_register_buf_cb_type_t cb_type,
+ uint32_t cb_count,
+ void * user_data);
+
+int32_t cam_evt_buf_done(int cam_id, mm_camera_ch_data_buf_t *bufs);
+
+/*camera JPEG methods*/
+uint8_t cam_jpeg_is_jpeg_supported(int cam_id);
+int32_t cam_jpeg_set_parm(int cam_id, mm_camera_jpeg_parm_type_t parm_type,
+ void* p_value);
+int32_t cam_jpeg_get_parm(int cam_id, mm_camera_jpeg_parm_type_t parm_type,
+ void* p_value);
+int32_t cam_jpeg_register_event_cb(int cam_id, mm_camera_jpeg_cb_t * evt_cb,
+ void * user_data);
+int32_t cam_jpeg_encode(int cam_id, uint8_t start,
+ mm_camera_jpeg_encode_t *data);
+
+/* Discover cameras via the media controller and return the camera table;
+ * *num_cameras receives the count. */
+extern mm_camera_t * mm_camera_query(uint8_t *num_cameras);
+extern uint8_t *mm_camera_do_mmap(uint32_t size, int *pmemFd);
+extern int mm_camera_do_munmap(int pmem_fd, void *addr, size_t size);
+extern uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+ struct ion_fd_data *ion_info_fd, int *mapFd);
+extern int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+ void *addr, size_t size);
+extern int mm_camera_dump_image(void *addr, uint32_t size, char *filename);
+extern uint32_t mm_camera_get_msm_frame_len(cam_format_t fmt_type,
+ camera_mode_t mode,
+ int width,
+ int height,
+ int image_type,
+ uint8_t *num_planes,
+ uint32_t planes[]);
+/* NOTE(review): mm_camera_do_mmap_ion / mm_camera_do_munmap_ion are
+ * already declared (with 'extern') a few lines above — these two
+ * re-declarations are redundant duplicates. */
+uint8_t *mm_camera_do_mmap_ion(int ion_fd, struct ion_allocation_data *alloc,
+ struct ion_fd_data *ion_info_fd, int *mapFd);
+int mm_camera_do_munmap_ion (int ion_fd, struct ion_fd_data *ion_info_fd,
+ void *addr, size_t size);
+extern void mm_camera_util_profile(const char *str);
+#endif /*__MM_CAMERA_INTERFACE2_H__*/
diff --git a/camera/mm-camera-interface/mm_camera_notify.c b/camera/mm-camera-interface/mm_camera_notify.c
new file mode 100644
index 0000000..d87e1f2
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_notify.c
@@ -0,0 +1,849 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <linux/msm_ion.h>
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+#if 0
+#undef CDBG
+#undef LOG_TAG
+#define CDBG ALOGV
+#define LOG_TAG "NotifyLogs"
+#endif
+
+/* Dequeue one RAW frame from the kernel and fan it out to every
+ * registered RAW-channel buffer callback.  Callback slots are
+ * snapshotted into a local array under the channel mutex and invoked
+ * only after unlock, so a callback cannot deadlock by re-entering the
+ * channel.  ref_count is bumped once per callback that will see the
+ * frame. */
+static void mm_camera_read_raw_frame(mm_camera_obj_t * my_obj)
+{
+ int rc = 0; /* NOTE(review): unused in this function */
+ int idx;
+ int i;
+ int cnt = 0;
+ mm_camera_stream_t *stream;
+ mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+ mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+ stream = &my_obj->ch[MM_CAMERA_CH_RAW].raw.stream;
+ idx = mm_camera_read_msm_frame(my_obj, stream);
+ if (idx < 0) {
+ /* no frame available / driver read error */
+ return;
+ }
+ pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_RAW].mutex);
+ for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+ if((my_obj->ch[MM_CAMERA_CH_RAW].buf_cb[i].cb) &&
+ (my_obj->poll_threads[MM_CAMERA_CH_RAW].data.used == 1)){
+ data[cnt].type = MM_CAMERA_CH_RAW;
+ data[cnt].def.idx = idx;
+ data[cnt].def.frame = &my_obj->ch[MM_CAMERA_CH_RAW].raw.stream.frame.frame[idx].frame;
+ my_obj->ch[MM_CAMERA_CH_RAW].raw.stream.frame.ref_count[idx]++;
+ CDBG("%s:calling data notify cb 0x%x, 0x%x\n", __func__,
+ (uint32_t)my_obj->ch[MM_CAMERA_CH_RAW].buf_cb[i].cb,
+ (uint32_t)my_obj->ch[MM_CAMERA_CH_RAW].buf_cb[i].user_data);
+ memcpy(&buf_cb[cnt], &my_obj->ch[MM_CAMERA_CH_RAW].buf_cb[i], sizeof(mm_camera_buf_cb_t));
+ cnt++;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_RAW].mutex);
+
+ /* Invoke the snapshotted callbacks outside the channel lock. */
+ for( i=0;i<cnt;i++) {
+ if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_RAW].data.used == 1){
+ buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+ }
+ }
+}
+
+/* ZSL frame matcher: pair a newly-read frame from one ZSL stream
+ * (preview or snapshot-main) with the same frame_id from the peer
+ * stream's ready queue.  Stale/unmatched frames are released back to
+ * the kernel; when a snapshot request is pending, one matched
+ * main/thumbnail pair is dispatched to the registered snapshot
+ * callbacks.  Locks the PREVIEW mutex then the SNAPSHOT mutex (always
+ * that order) and drops both before invoking callbacks.
+ * Returns 0 (rc is never set to anything else in the current code).
+ *
+ * Review fixes applied:
+ *  - restored '&notify_frame' where the text had been mangled into
+ *    '(not-sign)ify_frame' by an encoding error;
+ *  - NULL-check the dequeue from myq in the watermark loop (the peerq
+ *    dequeue was checked, the myq one was dereferenced blindly);
+ *  - in the "drop first unmatched frame of myq" pass, compare against
+ *    myq->tail instead of peerq->tail (copy/paste slip that left
+ *    myq->tail stale after unlinking myq's tail node);
+ *  - removed a stray double semicolon.
+ */
+int mm_camera_zsl_frame_cmp_and_enq(mm_camera_obj_t * my_obj,
+ mm_camera_frame_t *node,
+ mm_camera_stream_t *mystream)
+{
+ int watermark, interval;
+ mm_camera_frame_queue_t *myq;
+ mm_camera_frame_queue_t *peerq;
+ mm_camera_stream_t *peerstream;
+ int rc = 0;
+ int deliver_done = 0;
+ mm_camera_frame_t *peer_frame;
+ mm_camera_frame_t *peer_frame_prev;
+ mm_camera_frame_t *peer_frame_tmp;
+ mm_camera_notify_frame_t notify_frame;
+ uint32_t expected_id;
+ mm_camera_ch_data_buf_t data;
+ mm_camera_frame_t *my_frame = NULL;
+ int i;
+ mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+
+ pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+ pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+ /* The peer stream is the other half of the ZSL pair. */
+ if(mystream->stream_type == MM_CAMERA_STREAM_PREVIEW) {
+ peerstream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main;
+ } else
+ peerstream = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream;
+ myq = &mystream->frame.readyq;
+ peerq = &peerstream->frame.readyq;
+ watermark = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buffering_frame.water_mark;
+ interval = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buffering_frame.interval;
+ expected_id = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.expected_matching_id;
+ peer_frame = peerq->tail;
+ /* for 30-120 fps streaming no need to consider the wrapping back of frame_id
+ expected_matching_id is used when requires skipping bwtween frames */
+ if(!peer_frame || (node->frame.frame_id > peer_frame->frame.frame_id &&
+ node->frame.frame_id >= expected_id)) {
+ /* new frame is newer than all stored peer frames. simply keep the node */
+ /* in case the frame_id wraps back, the peer frame's frame_id will be
+ larger than the new frame's frame id */
+ CDBG("%s New frame. Just enqueue it into the queue ", __func__);
+ mm_camera_stream_frame_enq_no_lock(myq, node);
+ node->valid_entry = 1;
+ }
+ CDBG("%s Need to find match for the frame id %d ,exped_id =%d, strm type =%d",
+ __func__, node->frame.frame_id, expected_id, mystream->stream_type);
+ /* the node is older than the peer, we will either find a match or drop it */
+ peer_frame = peerq->head;
+ peer_frame_prev = NULL;
+ peer_frame_tmp = NULL;
+ while(peer_frame) {
+ CDBG("%s peer frame_id = %d node frame_id = %d, expected_id =%d, interval=%d", __func__,
+ peer_frame->frame.frame_id, node->frame.frame_id,
+ expected_id, interval);
+ if(peer_frame->match) {
+ CDBG("%s Peer frame already matched, keep looking in the list ",
+ __func__);
+ /* matched frame., skip */
+ peer_frame_prev = peer_frame;
+ peer_frame = peer_frame->next;
+ continue;
+ }
+ if(peer_frame->frame.frame_id == node->frame.frame_id &&
+ node->frame.frame_id >= my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.expected_matching_id) {
+ /* find a match keep the frame */
+ node->match = 1;
+ peer_frame->match = 1;
+ CDBG("%s Found match, add to myq, frame_id=%d ", __func__, node->frame.frame_id);
+ mm_camera_stream_frame_enq_no_lock(myq, node);
+ myq->match_cnt++;
+ peerq->match_cnt++;
+ /*set next min matching id*/
+ my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.expected_matching_id =
+ node->frame.frame_id + interval;
+ goto water_mark;
+ } else {
+ /* no match */
+ if(node->frame.frame_id > peer_frame->frame.frame_id) {
+ /* the incoming frame is newer than the peer's unmatched frame.
+ drop the peer frame */
+ CDBG("%s node frame is newer, release old peer frame ",
+ __func__);
+ if(!peer_frame_prev) {
+ /* this is the head */
+ peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(peerq);
+ notify_frame.frame = &peer_frame_tmp->frame;
+ notify_frame.idx = peer_frame_tmp->idx;
+ mm_camera_stream_util_buf_done(my_obj, peerstream,
+ &notify_frame);
+ peer_frame = peerq->head;
+ peer_frame_prev = NULL;
+ continue;
+ } else {
+ /* this is not the head. */
+ peer_frame_tmp = peer_frame;
+ peer_frame_prev->next = peer_frame->next;
+ if(peer_frame == peerq->tail) {
+ /* peer_frame is the tail */
+ peerq->tail = peer_frame_prev;
+ }
+ notify_frame.frame = &peer_frame_tmp->frame;
+ notify_frame.idx = peer_frame_tmp->idx;
+ mm_camera_stream_util_buf_done(my_obj, peerstream,
+ &notify_frame);
+ peer_frame = peer_frame_prev->next;
+ peerq->cnt--;
+ continue;
+ }
+ } else {
+ /* Current frame is older than peer's unmatched frame, dont add
+ * it into the queue. just drop it */
+ CDBG("%s node frame is older than peer's unmatched frame. "
+ "Drop the current frame.", __func__);
+ notify_frame.frame = &node->frame;
+ notify_frame.idx = node->idx;
+ mm_camera_stream_util_buf_done(my_obj, mystream, &notify_frame);
+ goto end;
+ }
+ }
+ }
+ if(!node->match && !node->valid_entry) {
+ /* if no match and not a valid entry.
+ * the node is not added into the queue. it's dirty node */
+ CDBG_ERROR("%s: stream type = %d and fd = %d, frame 0x%x is dirty"
+ " and queue back kernel", __func__, mystream->stream_type,
+ mystream->fd, node->frame.frame_id);
+ notify_frame.frame = &node->frame;
+ notify_frame.idx = node->idx;
+ mm_camera_stream_util_buf_done(my_obj, mystream, &notify_frame);
+ }
+water_mark:
+ /* Trim both ready queues down to the buffering watermark, releasing
+ * the oldest matched pair back to the kernel each iteration. */
+ while((myq->match_cnt > watermark) && (peerq->match_cnt > watermark)) {
+ peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(peerq);
+ if (NULL == peer_frame_tmp) {
+ break;
+ }
+ notify_frame.frame = &peer_frame_tmp->frame;
+ notify_frame.idx = peer_frame_tmp->idx;
+ CDBG("%s match_cnt %d > watermark %d, buf_done on "
+ "peer frame idx %d id = %d", __func__,
+ myq->match_cnt, watermark, notify_frame.idx,
+ notify_frame.frame->frame_id);
+ mm_camera_stream_util_buf_done(my_obj, peerstream, &notify_frame);
+ peerq->match_cnt--;
+ peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(myq);
+ /* FIX(review): this dequeue was dereferenced without a NULL check,
+ * unlike the peerq dequeue above. */
+ if (NULL == peer_frame_tmp) {
+ break;
+ }
+ notify_frame.frame = &peer_frame_tmp->frame;
+ notify_frame.idx = peer_frame_tmp->idx;
+ mm_camera_stream_util_buf_done(my_obj, mystream, &notify_frame);
+ myq->match_cnt--;
+ }
+end:
+ CDBG("%s myQ->cnt = %d myQ->match_cnt = %d ", __func__,
+ myq->cnt, myq->match_cnt);
+ if(myq->cnt > myq->match_cnt + 1) {
+ /* drop the first unmatched frame */
+ mm_camera_frame_t *peer_frame = myq->head;
+ mm_camera_frame_t *peer_frame_prev = NULL;
+ while(peer_frame) {
+ CDBG("%s myQ->cnt = %d myQ->match_cnt = %d ", __func__,
+ myq->cnt, myq->match_cnt);
+ if(peer_frame->match == 0) {
+ /* first unmatched frame */
+ if(!peer_frame_prev) {
+ /* this is the head */
+ peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(myq);
+ notify_frame.frame = &peer_frame_tmp->frame;
+ notify_frame.idx = peer_frame_tmp->idx;
+ CDBG("%s Head Issuing buf_done on my frame idx %d id %d",
+ __func__, notify_frame.idx,
+ notify_frame.frame->frame_id);
+ mm_camera_stream_util_buf_done(my_obj, mystream,
+ &notify_frame);
+ } else {
+ /* this is not the head. */
+ peer_frame_tmp = peer_frame;
+ peer_frame_prev->next = peer_frame->next;
+ /* FIX(review): this list belongs to myq; the original
+ * compared against peerq->tail while updating myq->tail. */
+ if(peer_frame == myq->tail) {
+ /* peer_frame is the tail */
+ myq->tail = peer_frame_prev;
+ }
+ notify_frame.frame = &peer_frame_tmp->frame;
+ notify_frame.idx = peer_frame_tmp->idx;
+ CDBG("%s Issuing buf_done on my frame idx %d id = %d",
+ __func__, notify_frame.idx,
+ notify_frame.frame->frame_id);
+ mm_camera_stream_util_buf_done(my_obj, mystream,
+ &notify_frame);
+ myq->cnt--;
+ }
+ break;
+ } else {
+ peer_frame_prev= peer_frame;
+ peer_frame = peer_frame_prev->next;
+ }
+ }
+ }
+ CDBG("%s peerQ->cnt = %d peerQ->match_cnt = %d ", __func__,
+ peerq->cnt, peerq->match_cnt);
+ if(peerq->cnt > peerq->match_cnt + 1) {
+ /* drop the first unmatched frame */
+ mm_camera_frame_t *peer_frame = peerq->head;
+ mm_camera_frame_t *peer_frame_prev = NULL;
+ while(peer_frame) {
+ CDBG("%s Traverse peerq list frame idx %d frame_id = %d match %d ",
+ __func__, peer_frame->idx, peer_frame->frame.frame_id,
+ peer_frame->match);
+ if(peer_frame->match == 0) {
+ /* first unmatched frame */
+ if(!peer_frame_prev) {
+ /* this is the head */
+ peer_frame_tmp = mm_camera_stream_frame_deq_no_lock(peerq);
+ notify_frame.frame = &peer_frame_tmp->frame;
+ notify_frame.idx = peer_frame_tmp->idx;
+ CDBG("%s Head Issuing buf_done on peer frame idx %d "
+ "id = %d", __func__, notify_frame.idx,
+ notify_frame.frame->frame_id);
+ mm_camera_stream_util_buf_done(my_obj, peerstream,
+ &notify_frame);
+ } else {
+ /* this is not the head. */
+ peer_frame_tmp = peer_frame;
+ peer_frame_prev->next = peer_frame->next;
+ if(peer_frame == peerq->tail) {
+ /* peer_frame is the tail */
+ peerq->tail = peer_frame_prev;
+ }
+ notify_frame.frame = &peer_frame_tmp->frame;
+ notify_frame.idx = peer_frame_tmp->idx;
+ CDBG("%s Issuing buf_done on peer frame idx %d id = %d",
+ __func__, notify_frame.idx,
+ notify_frame.frame->frame_id);
+ mm_camera_stream_util_buf_done(my_obj, peerstream,
+ &notify_frame);
+ peerq->cnt--;
+ }
+ break;
+ } else {
+ peer_frame_prev= peer_frame;
+ peer_frame = peer_frame_prev->next;
+ }
+ }
+ }
+
+ CDBG("%s Dispatching ZSL frame ", __func__);
+ if(my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.pending_cnt > 0) {
+ if(!myq->match_cnt || !peerq->match_cnt) {
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+ return 0;
+ }
+ /* dequeue one by one and then pass to HAL */
+ my_frame = mm_camera_stream_frame_deq_no_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq);
+ peer_frame = mm_camera_stream_frame_deq_no_lock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.readyq);
+ if (!my_frame || !peer_frame) {
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+ return 0;
+ }
+ myq->match_cnt--;
+ peerq->match_cnt--;
+ CDBG("%s: Dequeued frame: main frame idx: %d thumbnail "
+ "frame idx: %d", __func__, my_frame->idx, peer_frame->idx);
+ /* dispatch this pair of frames */
+ memset(&data, 0, sizeof(data));
+ data.type = MM_CAMERA_CH_SNAPSHOT;
+ data.snapshot.main.frame = &my_frame->frame;
+ data.snapshot.main.idx = my_frame->idx;
+ data.snapshot.thumbnail.frame = &peer_frame->frame;
+ data.snapshot.thumbnail.idx = peer_frame->idx;
+ my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.pending_cnt--;
+ memcpy(&buf_cb[0], &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[0],
+ sizeof(mm_camera_buf_cb_t)* MM_CAMERA_BUF_CB_MAX);
+ if(my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.pending_cnt == 0)
+ deliver_done = 1;
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+
+ goto send_to_hal;
+ }
+
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+ return rc;
+
+send_to_hal:
+ /* Callbacks run with no channel mutex held. */
+ for( i=0;i < MM_CAMERA_BUF_CB_MAX;i++) {
+ if (buf_cb[i].cb && my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1)
+ buf_cb[i].cb(&data,buf_cb[i].user_data);
+ }
+ if(deliver_done > 0) {
+ mm_camera_event_t data_evt;
+ CDBG("%s: ZSL delivered", __func__);
+ data_evt.event_type = MM_CAMERA_EVT_TYPE_CH;
+ data_evt.e.ch.evt = MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE;
+ data_evt.e.ch.ch = MM_CAMERA_CH_SNAPSHOT;
+ mm_camera_poll_send_ch_event(my_obj, &data_evt);
+ }
+ return rc;
+}
+
+/* Dequeue one preview frame and fan it out to the registered
+ * PREVIEW-channel callbacks.  The frame's ref_count is reset here (2
+ * in ZSL mode -- one for the preview consumer, one for the ZSL match
+ * queue -- else 1).  In ZSL mode the frame is also offered to the ZSL
+ * matcher before the callbacks run. */
+static void mm_camera_read_preview_frame(mm_camera_obj_t * my_obj)
+{
+ int rc = 0; /* NOTE(review): unused in this function */
+ int idx;
+ int i;
+ int cnt = 0;
+ mm_camera_stream_t *stream;
+ mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+ mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+ if (!my_obj->ch[MM_CAMERA_CH_PREVIEW].acquired) {
+ ALOGV("Preview channel is not in acquired state \n");
+ return;
+ }
+ stream = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream;
+ idx = mm_camera_read_msm_frame(my_obj, stream);
+ if (idx < 0) {
+ /* no frame available / driver read error */
+ return;
+ }
+ CDBG("%s Read Preview frame %d ", __func__, idx);
+ pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+ for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+ if((my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].cb) &&
+ (my_obj->poll_threads[MM_CAMERA_CH_PREVIEW].data.used == 1)) {
+ data[cnt].type = MM_CAMERA_CH_PREVIEW;
+ data[cnt].def.idx = idx;
+ data[cnt].def.frame = &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.frame[idx].frame;
+ /* Since the frame is originating here, reset the ref count to either
+ * 2(ZSL case) or 1(non-ZSL case). */
+ if(my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL)
+ my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.ref_count[idx] = 2;
+ else
+ my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.ref_count[idx] = 1;
+ CDBG("%s:calling data notify cb 0x%x, 0x%x\n", __func__,
+ (uint32_t)my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].cb,
+ (uint32_t)my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].user_data);
+ /*my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].cb(&data,
+ my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i].user_data);*/
+ memcpy(&buf_cb[cnt], &my_obj->ch[MM_CAMERA_CH_PREVIEW].buf_cb[i],
+ sizeof(mm_camera_buf_cb_t));
+ cnt++;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_PREVIEW].mutex);
+
+ /* ZSL: offer the frame to the matcher (takes its own locks). */
+ if(my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL) {
+ /* Reset match to 0. */
+ stream->frame.frame[idx].match = 0;
+ stream->frame.frame[idx].valid_entry = 0;
+ mm_camera_zsl_frame_cmp_and_enq(my_obj,
+ &my_obj->ch[MM_CAMERA_CH_PREVIEW].preview.stream.frame.frame[idx],
+ stream);
+ }
+
+ /* Invoke the snapshotted callbacks outside the channel lock. */
+ for( i=0;i<cnt;i++) {
+ if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_PREVIEW].data.used == 1) {
+ buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+ }
+ }
+}
+
+/* Liveshot (still capture during video) delivery: hand a main-stream
+ * frame to each registered snapshot callback with no thumbnail
+ * attached.  One frame is dequeued per active callback slot while the
+ * channel mutex is held (s_q->cnt re-checked every iteration);
+ * callbacks run after unlock.  Fires DATA_DELIVERY_DONE when anything
+ * was delivered. */
+static void mm_camera_snapshot_send_liveshot_notify(mm_camera_obj_t * my_obj)
+{
+ int delivered = 0;
+ mm_camera_frame_queue_t *s_q;
+ int i;
+ int cnt = 0;
+// mm_camera_frame_queue_t *s_q, *t_q;
+ mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+ mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+ mm_camera_frame_t *frame;
+ s_q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq;
+ pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+ for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+ if(s_q->cnt && my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].cb) {
+ data[cnt].type = MM_CAMERA_CH_SNAPSHOT;
+ frame = mm_camera_stream_frame_deq(s_q);
+ data[cnt].snapshot.main.frame = &frame->frame;
+ data[cnt].snapshot.main.idx = frame->idx;
+ data[cnt].snapshot.thumbnail.frame = NULL; /* liveshot: no thumbnail */
+ my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.ref_count[data[cnt].snapshot.main.idx]++;
+ /*my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].cb(&data,
+ my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].user_data);*/
+ memcpy(&buf_cb[cnt], &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i],
+ sizeof(mm_camera_buf_cb_t));
+ cnt++;
+
+ my_obj->snap_burst_num_by_user -= 1;
+ CDBG("%s: burst number =%d", __func__, my_obj->snap_burst_num_by_user);
+ delivered = 1;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+ /* Invoke the snapshotted callbacks outside the channel lock. */
+ for( i=0;i<cnt;i++) {
+ if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1) {
+ buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+ }
+ }
+
+ if(delivered) {
+ mm_camera_event_t data;
+ data.event_type = MM_CAMERA_EVT_TYPE_CH;
+ data.e.ch.evt = MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE;
+ data.e.ch.ch = MM_CAMERA_CH_SNAPSHOT;
+ mm_camera_poll_send_ch_event(my_obj, &data);
+ }
+}
+
+/* Deliver one matched main+thumbnail snapshot pair per registered
+ * snapshot callback.  Frames are dequeued and ref-counted under the
+ * SNAPSHOT channel mutex; callbacks are invoked after unlock.  Fires
+ * MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE once anything was delivered.
+ *
+ * Review fixes applied:
+ *  - ref_count was incremented through data[i] instead of data[cnt]:
+ *    i runs over callback slots while cnt indexes the entries actually
+ *    filled, so with a sparse callback table an uninitialized entry's
+ *    idx was used.  Both increments now use data[cnt].
+ *  - dropped commented-out dead code. */
+static void mm_camera_snapshot_send_snapshot_notify(mm_camera_obj_t * my_obj)
+{
+ int delivered = 0;
+ int i;
+ int cnt = 0;
+ mm_camera_frame_queue_t *s_q, *t_q;
+ mm_camera_frame_t *frame;
+ mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+ mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+ memset(&buf_cb, 0, sizeof(buf_cb));
+ s_q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq;
+ t_q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail.frame.readyq;
+ pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+ for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+ CDBG("%s Got notify: s_q->cnt = %d, t_q->cnt = %d, buf_cb = %x, "
+ "data.used = %d ", __func__, s_q->cnt, t_q->cnt,
+ (uint32_t)my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].cb,
+ my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used);
+ if((s_q->cnt && t_q->cnt && my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i].cb) &&
+ (my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1)) {
+ data[cnt].type = MM_CAMERA_CH_SNAPSHOT;
+ frame = mm_camera_stream_frame_deq(s_q);
+ data[cnt].snapshot.main.frame = &frame->frame;
+ data[cnt].snapshot.main.idx = frame->idx;
+ frame = mm_camera_stream_frame_deq(t_q);
+ data[cnt].snapshot.thumbnail.frame = &frame->frame;
+ data[cnt].snapshot.thumbnail.idx = frame->idx;
+ /* FIX(review): index the entry just filled (cnt), not the
+ * callback slot (i). */
+ my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.ref_count[data[cnt].snapshot.main.idx]++;
+ my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail.frame.ref_count[data[cnt].snapshot.thumbnail.idx]++;
+
+ memcpy(&buf_cb[cnt], &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buf_cb[i],
+ sizeof(mm_camera_buf_cb_t));
+ cnt++;
+
+ my_obj->snap_burst_num_by_user -= 1;
+ CDBG("%s: burst number =%d", __func__, my_obj->snap_burst_num_by_user);
+ delivered = 1;
+ }
+ }
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_SNAPSHOT].mutex);
+
+ /* Invoke the snapshotted callbacks outside the channel lock. */
+ for( i=0;i<cnt;i++) {
+ if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_SNAPSHOT].data.used == 1) {
+ buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+ }
+ }
+
+ CDBG("%s Delivered = %d ", __func__, delivered );
+ if(delivered) {
+ mm_camera_event_t edata;
+ edata.event_type = MM_CAMERA_EVT_TYPE_CH;
+ edata.e.ch.evt = MM_CAMERA_CH_EVT_DATA_DELIVERY_DONE;
+ edata.e.ch.ch = MM_CAMERA_CH_SNAPSHOT;
+ mm_camera_poll_send_ch_event(my_obj, &edata);
+ }
+}
+
+/* Dequeue one snapshot-main frame.  ZSL mode: bump its ref count and
+ * hand it to the ZSL matcher.  Otherwise: enqueue it on the ready
+ * queue and trigger pair delivery (liveshot variant when a full
+ * liveshot is in progress). */
+static void mm_camera_read_snapshot_main_frame(mm_camera_obj_t * my_obj)
+{
+ int rc = 0; /* NOTE(review): unused in this function */
+ int idx;
+ mm_camera_stream_t *stream;
+ mm_camera_frame_queue_t *q;
+ if (!my_obj->ch[MM_CAMERA_CH_SNAPSHOT].acquired) {
+ ALOGV("Snapshot channel is not in acquired state \n");
+ return;
+ }
+ q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq;
+ stream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main;
+ idx = mm_camera_read_msm_frame(my_obj,stream);
+ if (idx < 0)
+ return;
+
+ CDBG("%s Read Snapshot frame %d ", __func__, idx);
+ if(my_obj->op_mode == MM_CAMERA_OP_MODE_ZSL) {
+ my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.ref_count[idx]++;
+ /* Reset match to 0. */
+ stream->frame.frame[idx].match = 0;
+ stream->frame.frame[idx].valid_entry = 0;
+ mm_camera_zsl_frame_cmp_and_enq(my_obj,
+ &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.frame[idx], stream);
+ } else {
+ /* send to HAL */
+ mm_camera_stream_frame_enq(q, &stream->frame.frame[idx]);
+ if (!my_obj->full_liveshot)
+ mm_camera_snapshot_send_snapshot_notify(my_obj);
+ else
+ mm_camera_snapshot_send_liveshot_notify(my_obj);
+ }
+}
+/* Dequeue one thumbnail frame.  Non-ZSL: queue it and attempt delivery
+ * of a main+thumbnail pair.  ZSL: the thumbnail stream is unused, so
+ * the buffer is immediately queued back to the kernel. */
+static void mm_camera_read_snapshot_thumbnail_frame(mm_camera_obj_t * my_obj)
+{
+ int idx, rc = 0;
+ mm_camera_stream_t *stream;
+ mm_camera_frame_queue_t *q;
+
+ if (!my_obj->ch[MM_CAMERA_CH_SNAPSHOT].acquired) {
+ ALOGV("Snapshot channel is not in acquired state \n");
+ return;
+ }
+ q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail.frame.readyq;
+ stream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail;
+ idx = mm_camera_read_msm_frame(my_obj,stream);
+ if (idx < 0)
+ return;
+ if(my_obj->op_mode != MM_CAMERA_OP_MODE_ZSL) {
+ mm_camera_stream_frame_enq(q, &stream->frame.frame[idx]);
+ mm_camera_snapshot_send_snapshot_notify(my_obj);
+ } else {
+ /* ZSL does not use the thumbnail stream; recycle the buffer.
+ * (rc is logged nowhere on failure -- best-effort by design.) */
+// CDBG("%s: ZSL does not use thumbnail stream", __func__);
+ rc = mm_camera_stream_qbuf(my_obj, stream, idx);
+// CDBG("%s Q back thumbnail buffer rc = %d ", __func__, rc);
+ }
+}
+
+/* Dequeue one video frame and fan it out to the registered
+ * VIDEO-channel callbacks.  Callback slots are snapshotted under the
+ * channel mutex and invoked after unlock; count-limited callbacks
+ * (MM_CAMERA_BUF_CB_COUNT) are decremented and cleared in-place while
+ * still locked.
+ *
+ * Review fixes applied:
+ *  - the "not acquired" log message said "Snapshot channel" (copy/paste
+ *    from the snapshot reader); it now names the video channel;
+ *  - removed unused locals (rc, q) and commented-out dead code. */
+static void mm_camera_read_video_frame(mm_camera_obj_t * my_obj)
+{
+ int idx;
+ mm_camera_stream_t *stream;
+ int i;
+ int cnt = 0;
+ mm_camera_buf_cb_t buf_cb[MM_CAMERA_BUF_CB_MAX];
+ mm_camera_ch_data_buf_t data[MM_CAMERA_BUF_CB_MAX];
+
+ if (!my_obj->ch[MM_CAMERA_CH_VIDEO].acquired) {
+ ALOGV("Video channel is not in acquired state \n");
+ return;
+ }
+ stream = &my_obj->ch[MM_CAMERA_CH_VIDEO].video.video;
+ idx = mm_camera_read_msm_frame(my_obj,stream);
+ if (idx < 0)
+ return;
+
+ ALOGV("Video thread locked");
+ pthread_mutex_lock(&my_obj->ch[MM_CAMERA_CH_VIDEO].mutex);
+ for( i=0;i<MM_CAMERA_BUF_CB_MAX;i++) {
+ if((my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb) &&
+ (my_obj->poll_threads[MM_CAMERA_CH_VIDEO].data.used == 1)){
+ data[cnt].type = MM_CAMERA_CH_VIDEO;
+ data[cnt].video.main.frame = NULL;
+ data[cnt].video.main.idx = -1;
+ data[cnt].video.video.idx = idx;
+ data[cnt].video.video.frame = &my_obj->ch[MM_CAMERA_CH_VIDEO].video.video.
+ frame.frame[idx].frame;
+ my_obj->ch[MM_CAMERA_CH_VIDEO].video.video.frame.ref_count[idx]++;
+ ALOGV("Video thread callback issued");
+ memcpy(&buf_cb[cnt], &my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i],
+ sizeof(mm_camera_buf_cb_t));
+ cnt++;
+
+ ALOGV("Video thread callback returned");
+ /* One-shot/count-limited callbacks expire here, under the lock. */
+ if( my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb_type==MM_CAMERA_BUF_CB_COUNT ) {
+ ALOGV("<DEBUG>:%s: Additional cb called for buffer %p:%d",__func__,stream,idx);
+ if(--(my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb_count) == 0 )
+ my_obj->ch[MM_CAMERA_CH_VIDEO].buf_cb[i].cb=NULL;
+ }
+ }
+ }
+ pthread_mutex_unlock(&my_obj->ch[MM_CAMERA_CH_VIDEO].mutex);
+
+ /* Invoke the snapshotted callbacks outside the channel lock. */
+ for( i=0;i<cnt;i++) {
+ if(buf_cb[i].cb != NULL && my_obj->poll_threads[MM_CAMERA_CH_VIDEO].data.used == 1) {
+ buf_cb[i].cb(&data[i],buf_cb[i].user_data);
+ }
+ }
+
+ ALOGV("Video thread unlocked");
+}
+
+/* Reader for MM_CAMERA_STREAM_VIDEO_MAIN.  Not implemented; the stream
+ * carries no consumable data yet, so this is an intentional stub.
+ * Review fix: the original body was "return;rc;" -- a bare return
+ * followed by an unreachable expression statement on an otherwise
+ * unused local.  Replaced with an explicit empty stub. */
+static void mm_camera_read_video_main_frame(mm_camera_obj_t * my_obj)
+{
+ (void)my_obj; /* unused until this stream type is wired up */
+}
+
+/* ZSL burst reader for the snapshot main stream: enqueue the new frame
+ * on the ready queue; if the queue has grown past the configured
+ * watermark, recycle the oldest frame back to the kernel, then kick
+ * delivery of any pending ZSL capture requests. */
+static void mm_camera_read_zsl_main_frame(mm_camera_obj_t * my_obj)
+{
+ int idx, rc = 0;
+ mm_camera_stream_t *stream;
+ mm_camera_frame_queue_t *q;
+ mm_camera_frame_t *frame;
+ int cnt, watermark;
+
+ q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main.frame.readyq;
+ stream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.main;
+ idx = mm_camera_read_msm_frame(my_obj,stream);
+ if (idx < 0)
+ return; /* no frame available / driver read error */
+
+ CDBG("%s: Enqueuing frame id: %d", __func__, idx);
+ mm_camera_stream_frame_enq(q, &stream->frame.frame[idx]);
+ cnt = mm_camera_stream_frame_get_q_cnt(q);
+ watermark = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buffering_frame.water_mark;
+
+ CDBG("%s: Watermark: %d Queue in a frame: %d", __func__, watermark, cnt);
+ if(watermark < cnt) {
+ /* water overflow, queue head back to kernel */
+ frame = mm_camera_stream_frame_deq(q);
+ if(frame) {
+ rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+ if(rc < 0) {
+ CDBG("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ __func__, frame->idx, rc);
+ return;
+ }
+ }
+ }
+ mm_camera_check_pending_zsl_frames(my_obj, MM_CAMERA_CH_SNAPSHOT);
+}
+
+/* ZSL burst reader for the thumbnail/postview stream; same
+ * enqueue-then-trim-to-watermark policy as the main-frame reader
+ * above, followed by a check for pending ZSL requests. */
+static void mm_camera_read_zsl_postview_frame(mm_camera_obj_t * my_obj)
+{
+ int idx, rc = 0;
+ mm_camera_stream_t *stream;
+ mm_camera_frame_queue_t *q;
+ mm_camera_frame_t *frame;
+ int cnt, watermark;
+ q = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail.frame.readyq;
+ stream = &my_obj->ch[MM_CAMERA_CH_SNAPSHOT].snapshot.thumbnail;
+ idx = mm_camera_read_msm_frame(my_obj,stream);
+ if (idx < 0)
+ return; /* no frame available / driver read error */
+ mm_camera_stream_frame_enq(q, &stream->frame.frame[idx]);
+ watermark = my_obj->ch[MM_CAMERA_CH_SNAPSHOT].buffering_frame.water_mark;
+ cnt = mm_camera_stream_frame_get_q_cnt(q);
+ if(watermark < cnt) {
+ /* water overflow, queue head back to kernel */
+ frame = mm_camera_stream_frame_deq(q);
+ if(frame) {
+ rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+ if(rc < 0) {
+ CDBG("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+ __func__, frame->idx, rc);
+ return;
+ }
+ }
+ }
+ mm_camera_check_pending_zsl_frames(my_obj, MM_CAMERA_CH_SNAPSHOT);
+}
+
+/* Poll-thread entry point: route a "stream readable" notification to
+ * the reader for that stream type.  The fd argument is currently
+ * unused here -- each reader re-derives its stream from my_obj. */
+void mm_camera_msm_data_notify(mm_camera_obj_t * my_obj, int fd,
+ mm_camera_stream_type_t stream_type)
+{
+ switch(stream_type) {
+ case MM_CAMERA_STREAM_RAW:
+ mm_camera_read_raw_frame(my_obj);
+ break;
+ case MM_CAMERA_STREAM_PREVIEW:
+ mm_camera_read_preview_frame(my_obj);
+ break;
+ case MM_CAMERA_STREAM_SNAPSHOT:
+ mm_camera_read_snapshot_main_frame(my_obj);
+ break;
+ case MM_CAMERA_STREAM_THUMBNAIL:
+ mm_camera_read_snapshot_thumbnail_frame(my_obj);
+ break;
+ case MM_CAMERA_STREAM_VIDEO:
+ mm_camera_read_video_frame(my_obj);
+ break;
+ case MM_CAMERA_STREAM_VIDEO_MAIN:
+ mm_camera_read_video_main_frame(my_obj);
+ break;
+ default:
+ /* unknown/unhandled stream type: silently ignored */
+ break;
+ }
+}
+
+/* Map an MSM V4L2 extended capture mode to the logical channel that
+ * consumes it (main and thumbnail both feed SNAPSHOT);
+ * MM_CAMERA_CH_MAX marks an unmapped mode. */
+static mm_camera_channel_type_t mm_camera_image_mode_to_ch(int image_mode)
+{
+ switch(image_mode) {
+ case MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW:
+ return MM_CAMERA_CH_PREVIEW;
+ case MSM_V4L2_EXT_CAPTURE_MODE_MAIN:
+ case MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL:
+ return MM_CAMERA_CH_SNAPSHOT;
+ case MSM_V4L2_EXT_CAPTURE_MODE_VIDEO:
+ return MM_CAMERA_CH_VIDEO;
+ case MSM_V4L2_EXT_CAPTURE_MODE_RAW:
+ return MM_CAMERA_CH_RAW;
+ default:
+ return MM_CAMERA_CH_MAX;
+ }
+}
+
+/* Fan an event out to every callback registered for its type.  The
+ * registration table is copied under my_obj->mutex and the callbacks
+ * are invoked on the copy after unlock, so a callback may
+ * (de)register without deadlocking.  Events with an out-of-range type
+ * are dropped. */
+void mm_camera_dispatch_app_event(mm_camera_obj_t *my_obj, mm_camera_event_t *event)
+{
+ int i;
+ mm_camera_evt_obj_t evtcb;
+
+ if(event->event_type < MM_CAMERA_EVT_TYPE_MAX) {
+ pthread_mutex_lock(&my_obj->mutex);
+ memcpy(&evtcb,
+ &my_obj->evt[event->event_type],
+ sizeof(mm_camera_evt_obj_t));
+ pthread_mutex_unlock(&my_obj->mutex);
+ for(i = 0; i < MM_CAMERA_EVT_ENTRY_MAX; i++) {
+ if(evtcb.evt[i].evt_cb) {
+ evtcb.evt[i].evt_cb(event, evtcb.evt[i].user_data);
+ }
+ }
+ }
+}
+
+/* Drain one V4L2 event from the control fd (VIDIOC_DQEVENT) and
+ * dispatch it to registered app callbacks.  The payload is interpreted
+ * in-place as an mm_camera_event_t carried in ev.u.data; the MSM
+ * private error event is rewritten into a CTRL/ERROR event first.
+ * Ioctl failures (rc < 0) are silently ignored. */
+void mm_camera_msm_evt_notify(mm_camera_obj_t * my_obj, int fd)
+{
+ struct v4l2_event ev;
+ int rc;
+ mm_camera_event_t *evt = NULL;
+
+ memset(&ev, 0, sizeof(ev));
+ rc = ioctl(fd, VIDIOC_DQEVENT, &ev);
+ evt = (mm_camera_event_t *)ev.u.data;
+
+ if (rc >= 0) {
+ if(ev.type == V4L2_EVENT_PRIVATE_START+MSM_CAM_APP_NOTIFY_ERROR_EVENT) {
+ evt->event_type = MM_CAMERA_EVT_TYPE_CTRL;
+ evt->e.ctrl.evt = MM_CAMERA_CTRL_EVT_ERROR;
+ }
+ /* placeholder per-type hooks; every type currently falls through
+ * to the common dispatch below */
+ switch(evt->event_type) {
+ case MM_CAMERA_EVT_TYPE_INFO:
+ break;
+ case MM_CAMERA_EVT_TYPE_STATS:
+ break;
+ case MM_CAMERA_EVT_TYPE_CTRL:
+ break;
+ default:
+ break;
+ }
+ mm_camera_dispatch_app_event(my_obj, evt);
+ }
+}
diff --git a/camera/mm-camera-interface/mm_camera_poll_thread.c b/camera/mm-camera-interface/mm_camera_poll_thread.c
new file mode 100644
index 0000000..aee0a58
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_poll_thread.c
@@ -0,0 +1,466 @@
+/*
+Copyright (c) 2011, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+typedef enum {
+ /* ask the channel to flush out the queued frames. */
+ MM_CAMERA_PIPE_CMD_FLASH_QUEUED_FRAME,
+ /* ask ctrl fd to generate ch event to HAL */
+ MM_CAMERA_PIPE_CMD_CH_EVENT,
+ /*start*/
+ MM_CAMERA_PIPE_CMD_ADD_CH,
+
+ /*stop*/
+ MM_CAMERA_PIPE_CMD_DEL_CH,
+
+ /* exit */
+ MM_CAMERA_PIPE_CMD_EXIT,
+ /* max count */
+ MM_CAMERA_PIPE_CMD_MAX
+} mm_camera_pipe_cmd_type_t;
+
+typedef enum {
+ MM_CAMERA_POLL_TASK_STATE_POLL, /* polling pid in polling state. */
+ MM_CAMERA_POLL_TASK_STATE_MAX
+} mm_camera_poll_task_state_type_t;
+
+typedef struct {
+ uint8_t cmd;
+ mm_camera_event_t event;
+} mm_camera_sig_evt_t;
+
+static int32_t mm_camera_poll_sig(mm_camera_poll_thread_t *poll_cb,
+ uint32_t cmd)
+{
+ /* send through pipe */
+ /* get the mutex */
+ mm_camera_sig_evt_t cmd_evt;
+ memset(&cmd_evt, 0, sizeof(cmd_evt));
+ cmd_evt.cmd = cmd;
+ int len;
+ CDBG("%s: begin", __func__);
+
+ pthread_mutex_lock(&poll_cb->mutex);
+ /* reset the status to false */
+ poll_cb->status = FALSE;
+ /* send cmd to worker */
+ len = write(poll_cb->data.pfds[1], &cmd_evt, sizeof(cmd_evt));
+ if(len < 1) {
+ CDBG("%s: len = %d, errno = %d", __func__, len, errno);
+ //pthread_mutex_unlock(&poll_cb->mutex);
+ //return -1;
+ }
+ CDBG("%s: begin IN mutex write done, len = %d", __func__, len);
+ /* wait till worker task gives positive signal */
+ while (!poll_cb->status) {
+ int rc;
+ struct timespec ts;
+ clock_gettime(CLOCK_REALTIME, &ts);
+ ts.tv_sec += 2;
+ CDBG("%s: wait", __func__);
+ rc = pthread_cond_timedwait(&poll_cb->cond_v, &poll_cb->mutex, &ts);
+ if (rc) {
+ ALOGV("%s: error on pthread_cond_timedwait: %s", __func__, strerror(rc));
+ break;
+ }
+ }
+ /* done */
+ pthread_mutex_unlock(&poll_cb->mutex);
+ CDBG("%s: end, len = %d, size = %d", __func__, len, sizeof(cmd_evt));
+ return MM_CAMERA_OK;
+}
+
+static void mm_camera_poll_sig_done(mm_camera_poll_thread_t *poll_cb)
+{
+ pthread_mutex_lock(&poll_cb->mutex);
+ poll_cb->status = TRUE;
+ pthread_cond_signal(&poll_cb->cond_v);
+ CDBG("%s: done, in mutex", __func__);
+ pthread_mutex_unlock(&poll_cb->mutex);
+}
+
+static int32_t mm_camera_poll_proc_msm(mm_camera_poll_thread_t *poll_cb, struct pollfd *fds)
+{
+ int i;
+
+ for(i = 0; i < poll_cb->data.num_fds-1; i++) {
+ /*Checking for data events*/
+ if((poll_cb->data.poll_type == MM_CAMERA_POLL_TYPE_CH) &&
+ (fds[i].revents & POLLIN) &&
+ (fds[i].revents & POLLRDNORM)) {
+ if(poll_cb->data.used) {
+ mm_camera_msm_data_notify(poll_cb->data.my_obj,
+ fds[i].fd,
+ poll_cb->data.poll_streams[i]->stream_type);
+ }
+
+ }
+ /*Checking for ctrl events*/
+ if((poll_cb->data.poll_type == MM_CAMERA_POLL_TYPE_EVT) &&
+ (fds[i].revents & POLLPRI)) {
+ CDBG("%s: mm_camera_msm_evt_notify\n", __func__);
+ mm_camera_msm_evt_notify(poll_cb->data.my_obj, fds[i].fd);
+ }
+
+ }
+ return 0;
+}
+
+static void cm_camera_poll_set_state(mm_camera_poll_thread_t *poll_cb,
+ mm_camera_poll_task_state_type_t state)
+{
+ poll_cb->data.state = state;
+}
+
+static void mm_camera_poll_proc_pipe(mm_camera_poll_thread_t *poll_cb)
+{
+ ssize_t read_len;
+ int i;
+ mm_camera_sig_evt_t cmd_evt;
+ read_len = read(poll_cb->data.pfds[0], &cmd_evt, sizeof(cmd_evt));
+ CDBG("%s: read_fd = %d, read_len = %d, expect_len = %d",
+ __func__, poll_cb->data.pfds[0], (int)read_len, (int)sizeof(cmd_evt));
+ switch(cmd_evt.cmd) {
+ case MM_CAMERA_PIPE_CMD_FLASH_QUEUED_FRAME:
+ mm_camera_dispatch_buffered_frames(poll_cb->data.my_obj,
+ poll_cb->data.ch_type);
+ break;
+ case MM_CAMERA_PIPE_CMD_CH_EVENT: {
+ mm_camera_event_t *event = &cmd_evt.event;
+ CDBG("%s: ch event, type=0x%x, ch=%d, evt=%d",
+ __func__, event->event_type, event->e.ch.ch, event->e.ch.evt);
+ mm_camera_dispatch_app_event(poll_cb->data.my_obj, event);
+ break;
+ }
+ case MM_CAMERA_PIPE_CMD_ADD_CH:
+ if(poll_cb->data.poll_type == MM_CAMERA_POLL_TYPE_CH) {
+ for(i = 0; i < MM_CAMERA_CH_STREAM_MAX; i++) {
+ if(poll_cb->data.poll_streams[i]) {
+ poll_cb->data.poll_fd[poll_cb->data.num_fds + i] = poll_cb->data.poll_streams[i]->fd;
+ }
+ }
+ }
+ poll_cb->data.num_fds += mm_camera_ch_util_get_num_stream(poll_cb->data.my_obj,
+ poll_cb->data.ch_type);
+ poll_cb->data.used = 1;
+ CDBG("Num fds after MM_CAMERA_PIPE_CMD_ADD_CH = %d",poll_cb->data.num_fds);
+ break;
+
+ case MM_CAMERA_PIPE_CMD_DEL_CH:
+ poll_cb->data.num_fds -= mm_camera_ch_util_get_num_stream(poll_cb->data.my_obj,
+ poll_cb->data.ch_type);
+ poll_cb->data.used = 0;
+ CDBG("Num fds after MM_CAMERA_PIPE_CMD_DEL_CH = %d",poll_cb->data.num_fds);
+ break;
+
+ case MM_CAMERA_PIPE_CMD_EXIT:
+ default:
+ cm_camera_poll_set_state(poll_cb, MM_CAMERA_POLL_TASK_STATE_MAX);
+ mm_camera_poll_sig_done(poll_cb);
+ break;
+ }
+}
+
+static int mm_camera_poll_ch_busy(mm_camera_obj_t * my_obj, int ch_type)
+{
+ int i;
+ int used = 0;
+ mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+ pthread_mutex_lock(&poll_cb->mutex);
+ used = poll_cb->data.used;
+ pthread_mutex_unlock(&poll_cb->mutex);
+ if(used)
+ return 1;
+ else
+ return 0;
+}
+int32_t mm_camera_poll_dispatch_buffered_frames(mm_camera_obj_t * my_obj, int ch_type)
+{
+ mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+ mm_camera_sig_evt_t cmd;
+ int len;
+
+ cmd.cmd = MM_CAMERA_PIPE_CMD_FLASH_QUEUED_FRAME;
+ memset(&cmd.event, 0, sizeof(cmd.event));
+ pthread_mutex_lock(&poll_cb->mutex);
+ len = write(poll_cb->data.pfds[1], &cmd, sizeof(cmd));
+ pthread_mutex_unlock(&poll_cb->mutex);
+ return MM_CAMERA_OK;
+}
+
+int mm_camera_poll_busy(mm_camera_obj_t * my_obj)
+{
+ int i;
+ mm_camera_poll_thread_t *poll_cb;
+ for(i = 0; i < (MM_CAMERA_POLL_THRAED_MAX - 1); i++) {
+ if(mm_camera_poll_ch_busy(my_obj, i) > 0)
+ return 1;
+ }
+ return 0;
+}
+
+int mm_camera_poll_send_ch_event(mm_camera_obj_t * my_obj, mm_camera_event_t *event)
+{
+ mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[MM_CAMERA_CH_MAX];
+ mm_camera_sig_evt_t cmd;
+ int len;
+
+ cmd.cmd = MM_CAMERA_PIPE_CMD_CH_EVENT;
+ memcpy(&cmd.event, event, sizeof(cmd.event));
+ CDBG("%s: ch event, type=0x%x, ch=%d, evt=%d, poll_type = %d, read_fd=%d, write_fd=%d",
+ __func__, event->event_type, event->e.ch.ch, event->e.ch.evt, poll_cb->data.poll_type,
+ poll_cb->data.pfds[0], poll_cb->data.pfds[1]);
+ pthread_mutex_lock(&poll_cb->mutex);
+ len = write(poll_cb->data.pfds[1], &cmd, sizeof(cmd));
+ pthread_mutex_unlock(&poll_cb->mutex);
+ return MM_CAMERA_OK;
+}
+
+static void *mm_camera_poll_fn(mm_camera_poll_thread_t *poll_cb)
+{
+ int rc = 0, i;
+ struct pollfd fds[MM_CAMERA_CH_STREAM_MAX+1];
+ int timeoutms;
+ CDBG("%s: poll type = %d, num_fd = %d\n",
+ __func__, poll_cb->data.poll_type, poll_cb->data.num_fds);
+ do {
+ for(i = 0; i < poll_cb->data.num_fds; i++) {
+ fds[i].fd = poll_cb->data.poll_fd[i];
+ fds[i].events = POLLIN|POLLRDNORM|POLLPRI;
+ }
+ timeoutms = poll_cb->data.timeoutms;
+ rc = poll(fds, poll_cb->data.num_fds, timeoutms);
+ if(rc > 0) {
+ if((fds[0].revents & POLLIN) && (fds[0].revents & POLLRDNORM))
+ mm_camera_poll_proc_pipe(poll_cb);
+ else
+ mm_camera_poll_proc_msm(poll_cb, &fds[1]);
+ } else {
+ /* in error case sleep 10 us and then continue. hard coded here */
+ usleep(10);
+ continue;
+ }
+ } while (poll_cb->data.state == MM_CAMERA_POLL_TASK_STATE_POLL);
+ return NULL;
+}
+
+static void *mm_camera_poll_thread(void *data)
+{
+ int rc = 0;
+ int i;
+ void *ret = NULL;
+ mm_camera_poll_thread_t *poll_cb = data;
+
+ poll_cb->data.poll_fd[poll_cb->data.num_fds++] = poll_cb->data.pfds[0];
+ switch(poll_cb->data.poll_type) {
+ case MM_CAMERA_POLL_TYPE_EVT:
+ poll_cb->data.poll_fd[poll_cb->data.num_fds++] =
+ ((mm_camera_obj_t *)(poll_cb->data.my_obj))->ctrl_fd;
+ break;
+ case MM_CAMERA_POLL_TYPE_CH:
+ default:
+ break;
+ }
+ mm_camera_poll_sig_done(poll_cb);
+ ret = mm_camera_poll_fn(poll_cb);
+ return ret;
+}
+
+int mm_camera_poll_start(mm_camera_obj_t * my_obj, mm_camera_poll_thread_t *poll_cb)
+{
+ pthread_mutex_lock(&poll_cb->mutex);
+ poll_cb->status = 0;
+ pthread_create(&poll_cb->data.pid, NULL, mm_camera_poll_thread, (void *)poll_cb);
+ while (!poll_cb->status) {
+ int rc;
+ struct timespec ts;
+
+ clock_gettime(CLOCK_REALTIME, &ts);
+ ts.tv_sec += 2;
+ rc = pthread_cond_timedwait(&poll_cb->cond_v, &poll_cb->mutex, &ts);
+ if (rc) {
+ ALOGV("%s: error on pthread_cond_timedwait: %s", __func__, strerror(rc));
+ break;
+ }
+ }
+ pthread_mutex_unlock(&poll_cb->mutex);
+ return MM_CAMERA_OK;
+}
+
+int mm_camera_poll_stop(mm_camera_obj_t * my_obj, mm_camera_poll_thread_t *poll_cb)
+{
+ CDBG("%s, my_obj=0x%x\n", __func__, (uint32_t)my_obj);
+ mm_camera_poll_sig(poll_cb, MM_CAMERA_PIPE_CMD_EXIT);
+ if (pthread_join(poll_cb->data.pid, NULL) != 0) {
+ CDBG("%s: pthread dead already\n", __func__);
+ }
+ return MM_CAMERA_OK;
+}
+
+
+int mm_camera_poll_thread_add_ch(mm_camera_obj_t * my_obj, int ch_type)
+{
+ mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+ mm_camera_sig_evt_t cmd;
+ int len;
+
+ if(poll_cb->data.used == 1){
+ CDBG_ERROR("%s : Thread is Active",__func__);
+ return MM_CAMERA_OK;
+ }
+ CDBG("Run thread for ch_type = %d ",ch_type);
+ cmd.cmd = MM_CAMERA_PIPE_CMD_ADD_CH;
+ poll_cb->data.ch_type = ch_type;
+
+ pthread_mutex_lock(&poll_cb->mutex);
+ len = write(poll_cb->data.pfds[1], &cmd, sizeof(cmd));
+ pthread_mutex_unlock(&poll_cb->mutex);
+ poll_cb->data.used = 1;
+ return MM_CAMERA_OK;
+}
+
+int mm_camera_poll_thread_del_ch(mm_camera_obj_t * my_obj, int ch_type)
+{
+ mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+ mm_camera_sig_evt_t cmd;
+ int len;
+
+ if(poll_cb->data.used == 0){
+ CDBG_ERROR("%s : Thread is Not Active",__func__);
+ return MM_CAMERA_OK;
+ }
+ CDBG("Stop thread for ch_type = %d ",ch_type);
+ cmd.cmd = MM_CAMERA_PIPE_CMD_DEL_CH;
+ poll_cb->data.ch_type = (mm_camera_channel_type_t)ch_type;
+
+ pthread_mutex_lock(&poll_cb->mutex);
+ len = write(poll_cb->data.pfds[1], &cmd, sizeof(cmd));
+ pthread_mutex_unlock(&poll_cb->mutex);
+ poll_cb->data.used = 0;
+ return MM_CAMERA_OK;
+
+}
+
+
+int mm_camera_poll_thread_launch(mm_camera_obj_t * my_obj, int ch_type)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+ if(mm_camera_poll_ch_busy(my_obj, ch_type) > 0) {
+ CDBG_ERROR("%s: err, poll thread of channel %d already running. cam_id=%d\n",
+ __func__, ch_type, my_obj->my_id);
+ return -MM_CAMERA_E_INVALID_OPERATION;
+ }
+ poll_cb->data.ch_type = ch_type;
+ rc = pipe(poll_cb->data.pfds);
+ if(rc < 0) {
+ CDBG_ERROR("%s: camera_id = %d, pipe open rc=%d\n", __func__, my_obj->my_id, rc);
+ rc = - MM_CAMERA_E_GENERAL;
+ }
+ CDBG("%s: ch = %d, poll_type = %d, read fd = %d, write fd = %d",
+ __func__, ch_type, poll_cb->data.poll_type,
+ poll_cb->data.pfds[0], poll_cb->data.pfds[1]);
+ poll_cb->data.my_obj = my_obj;
+ poll_cb->data.used = 0;
+ poll_cb->data.timeoutms = -1; /* Infinite seconds */
+
+ if(ch_type < MM_CAMERA_CH_MAX) {
+ poll_cb->data.poll_type = MM_CAMERA_POLL_TYPE_CH;
+ mm_camera_ch_util_get_stream_objs(my_obj, ch_type,
+ &poll_cb->data.poll_streams[0],
+ &poll_cb->data.poll_streams[1]);
+ } else{
+ poll_cb->data.poll_type = MM_CAMERA_POLL_TYPE_EVT;
+ }
+
+ ALOGV("%s: ch_type = %d, poll_type = %d, read fd = %d, write fd = %d",
+ __func__, ch_type, poll_cb->data.poll_type,
+ poll_cb->data.pfds[0], poll_cb->data.pfds[1]);
+ /* launch the thread */
+ rc = mm_camera_poll_start(my_obj, poll_cb);
+ return rc;
+}
+
+int mm_camera_poll_thread_release(mm_camera_obj_t * my_obj, int ch_type)
+{
+ int rc = MM_CAMERA_OK;
+ mm_camera_poll_thread_t *poll_cb = &my_obj->poll_threads[ch_type];
+ if(MM_CAMERA_POLL_TASK_STATE_MAX == poll_cb->data.state) {
+ CDBG("%s: err, poll thread of channel % is not running. cam_id=%d\n",
+ __func__, ch_type, my_obj->my_id);
+ return -MM_CAMERA_E_INVALID_OPERATION;
+ }
+ rc = mm_camera_poll_stop(my_obj, poll_cb);
+
+ if(poll_cb->data.pfds[0]) {
+ close(poll_cb->data.pfds[0]);
+ }
+ if(poll_cb->data.pfds[1]) {
+ close(poll_cb->data.pfds[1]);
+ }
+ memset(&poll_cb->data, 0, sizeof(poll_cb->data));
+ return MM_CAMERA_OK;
+}
+
+void mm_camera_poll_threads_init(mm_camera_obj_t * my_obj)
+{
+ int i;
+ mm_camera_poll_thread_t *poll_cb;
+
+ for(i = 0; i < MM_CAMERA_POLL_THRAED_MAX; i++) {
+ poll_cb = &my_obj->poll_threads[i];
+ pthread_mutex_init(&poll_cb->mutex, NULL);
+ pthread_cond_init(&poll_cb->cond_v, NULL);
+ }
+}
+
+void mm_camera_poll_threads_deinit(mm_camera_obj_t * my_obj)
+{
+ int i;
+ mm_camera_poll_thread_t *poll_cb;
+
+ for(i = 0; i < MM_CAMERA_POLL_THRAED_MAX; i++) {
+ poll_cb = &my_obj->poll_threads[i];
+ if(poll_cb->data.used)
+ mm_camera_poll_stop(my_obj, poll_cb);
+ pthread_mutex_destroy(&poll_cb->mutex);
+ pthread_cond_destroy(&poll_cb->cond_v);
+ memset(poll_cb, 0, sizeof(mm_camera_poll_thread_t));
+ }
+}
diff --git a/camera/mm-camera-interface/mm_camera_sock.c b/camera/mm-camera-interface/mm_camera_sock.c
new file mode 100644
index 0000000..1a65863
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_sock.c
@@ -0,0 +1,224 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <stdio.h>
+#include <unistd.h>
+#include <stdlib.h>
+#include <errno.h>
+#include <string.h>
+#include <sys/socket.h>
+#include <sys/uio.h>
+#include <linux/un.h>
+
+#include "mm_camera_dbg.h"
+#include "mm_camera_sock.h"
+
+/*===========================================================================
+ * FUNCTION - mm_camera_socket_create -
+ *
+ * DESCRIPTION: opens a domain socket tied to camera ID and socket type
+ * int cam_id: camera ID
+ * mm_camera_sock_type_t sock_type: socket type, TCP/UDP
+ * returned fd is related to the domain socket
+ *==========================================================================*/
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type)
+{
+ int socket_fd;
+ struct sockaddr_un sock_addr;
+ int sktype;
+ int rc;
+
+ switch (sock_type)
+ {
+ case MM_CAMERA_SOCK_TYPE_UDP:
+ sktype = SOCK_DGRAM;
+ break;
+ case MM_CAMERA_SOCK_TYPE_TCP:
+ sktype = SOCK_STREAM;
+ break;
+ default:
+ CDBG_ERROR("%s: unknown socket type =%d", __func__, sock_type);
+ return -1;
+ }
+ socket_fd = socket(AF_UNIX, sktype, 0);
+ if (socket_fd < 0) {
+ CDBG_ERROR("%s: error create socket fd =%d", __func__, socket_fd);
+ return socket_fd;
+ }
+
+ memset(&sock_addr, 0, sizeof(sock_addr));
+ sock_addr.sun_family = AF_UNIX;
+ snprintf(sock_addr.sun_path, UNIX_PATH_MAX, "/data/cam_socket%d", cam_id);
+ if((rc = connect(socket_fd, (struct sockaddr *) &sock_addr,
+ sizeof(sock_addr))) != 0) {
+ close(socket_fd);
+ socket_fd = -1;
+ CDBG_ERROR("%s: socket_fd=%d %s ", __func__, socket_fd, strerror(errno));
+ }
+
+ CDBG("%s: socket_fd=%d %s", __func__, socket_fd, sock_addr.sun_path);
+ return socket_fd;
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_socket_close -
+ *
+ * DESCRIPTION: close domain socket by its fd
+ *==========================================================================*/
+void mm_camera_socket_close(int fd)
+{
+ if (fd > 0) {
+ close(fd);
+ }
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_socket_sendmsg -
+ *
+ * DESCRIPTION: send msg through domain socket
+ * int fd: socket fd
+ * mm_camera_sock_msg_packet_t *msg: pointer to msg to be sent over domain socket
+ * int sendfd: file descriptors to be sent
+ * return the total bytes of sent msg
+ *==========================================================================*/
+int mm_camera_socket_sendmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int sendfd)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr * cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int))];
+
+ if (msg == NULL) {
+ CDBG("%s: msg is NULL", __func__);
+ return -1;
+ }
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+ CDBG("%s: iov_len=%d", __func__, iov[0].iov_len);
+
+ msgh.msg_control = NULL;
+ msgh.msg_controllen = 0;
+
+ // if sendfd is valid, we need to pass it through a control msg
+ if( sendfd > 0) {
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+ cmsghp = CMSG_FIRSTHDR(&msgh);
+ if (cmsghp != NULL) {
+ CDBG("%s: Got ctrl msg pointer", __func__);
+ cmsghp->cmsg_level = SOL_SOCKET;
+ cmsghp->cmsg_type = SCM_RIGHTS;
+ cmsghp->cmsg_len = CMSG_LEN(sizeof(int));
+ *((int *)CMSG_DATA(cmsghp)) = sendfd;
+ CDBG("%s: cmsg data=%d", __func__, *((int *) CMSG_DATA(cmsghp)));
+ } else {
+ CDBG("%s: ctrl msg NULL", __func__);
+ return -1;
+ }
+ }
+
+ return sendmsg(fd, &(msgh), 0);
+}
+
+/*===========================================================================
+ * FUNCTION - mm_camera_socket_recvmsg -
+ *
+ * DESCRIPTION: receive msg from domain socket.
+ * int fd: socket fd
+ * void *msg: pointer to mm_camera_sock_msg_packet_t to hold incoming msg,
+ * need be allocated by the caller
+ * uint32_t buf_size: the size of the buf that holds incoming msg
+ * int *rcvdfd: pointer to hold recvd file descriptor if not NULL.
+ * return the total bytes of received msg
+ *==========================================================================*/
+int mm_camera_socket_recvmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int *rcvdfd)
+{
+ struct msghdr msgh;
+ struct iovec iov[1];
+ struct cmsghdr *cmsghp = NULL;
+ char control[CMSG_SPACE(sizeof(int))];
+ int rcvd_fd = -1;
+ int rcvd_len = 0;
+
+ if ( (msg == NULL) || (buf_size <= 0) ) {
+ CDBG_ERROR(" %s: msg buf is NULL", __func__);
+ return -1;
+ }
+
+ memset(&msgh, 0, sizeof(msgh));
+ msgh.msg_name = NULL;
+ msgh.msg_namelen = 0;
+ msgh.msg_control = control;
+ msgh.msg_controllen = sizeof(control);
+
+ iov[0].iov_base = msg;
+ iov[0].iov_len = buf_size;
+ msgh.msg_iov = iov;
+ msgh.msg_iovlen = 1;
+
+ if ( (rcvd_len = recvmsg(fd, &(msgh), 0)) <= 0) {
+ CDBG_ERROR(" %s: recvmsg failed", __func__);
+ return rcvd_len;
+ }
+
+ CDBG("%s: msg_ctrl %p len %d", __func__, msgh.msg_control, msgh.msg_controllen);
+
+ if( ((cmsghp = CMSG_FIRSTHDR(&msgh)) != NULL) &&
+ (cmsghp->cmsg_len == CMSG_LEN(sizeof(int))) ) {
+ if (cmsghp->cmsg_level == SOL_SOCKET &&
+ cmsghp->cmsg_type == SCM_RIGHTS) {
+ CDBG("%s: CtrlMsg is valid", __func__);
+ rcvd_fd = *((int *) CMSG_DATA(cmsghp));
+ CDBG("%s: Receieved fd=%d", __func__, rcvd_fd);
+ } else {
+ CDBG_ERROR("%s: Unexpected Control Msg. Line=%d", __func__, __LINE__);
+ }
+ }
+
+ if (rcvdfd) {
+ *rcvdfd = rcvd_fd;
+ }
+
+ return rcvd_len;
+}
+
diff --git a/camera/mm-camera-interface/mm_camera_sock.h b/camera/mm-camera-interface/mm_camera_sock.h
new file mode 100644
index 0000000..0846c05
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_sock.h
@@ -0,0 +1,57 @@
+/*
+Copyright (c) 2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef __MM_CAMERA_SOCKET_H__
+#define __MM_CAMERA_SOCKET_H__
+
+#include <inttypes.h>
+
+typedef enum {
+ MM_CAMERA_SOCK_TYPE_UDP,
+ MM_CAMERA_SOCK_TYPE_TCP,
+} mm_camera_sock_type_t;
+
+int mm_camera_socket_create(int cam_id, mm_camera_sock_type_t sock_type);
+
+int mm_camera_socket_sendmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int sendfd);
+
+int mm_camera_socket_recvmsg(
+ int fd,
+ void *msg,
+ uint32_t buf_size,
+ int *rcvdfd);
+
+void mm_camera_socket_close(int fd);
+
+#endif /*__MM_CAMERA_SOCKET_H__*/
+
diff --git a/camera/mm-camera-interface/mm_camera_stream.c b/camera/mm-camera-interface/mm_camera_stream.c
new file mode 100644
index 0000000..6fca2db
--- /dev/null
+++ b/camera/mm-camera-interface/mm_camera_stream.c
@@ -0,0 +1,895 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <pthread.h>
+#include "mm_camera_dbg.h"
+#include <errno.h>
+#include <sys/ioctl.h>
+#include <sys/types.h>
+#include <sys/stat.h>
+#include <fcntl.h>
+#include <poll.h>
+#include <time.h>
+
+#include "mm_camera_interface2.h"
+#include "mm_camera.h"
+
+static void mm_camera_stream_util_set_state(mm_camera_stream_t *stream,
+ mm_camera_stream_state_type_t state);
+
+int mm_camera_stream_init_q(mm_camera_frame_queue_t *q)
+{
+ pthread_mutex_init(&q->mutex, NULL);
+ return MM_CAMERA_OK;
+}
+int mm_camera_stream_deinit_q(mm_camera_frame_queue_t *q)
+{
+ pthread_mutex_destroy(&q->mutex);
+ return MM_CAMERA_OK;
+}
+
+int mm_camera_stream_frame_get_q_cnt(mm_camera_frame_queue_t *q)
+{
+ int cnt;
+ pthread_mutex_lock(&q->mutex);
+ cnt = q->cnt;
+ pthread_mutex_unlock(&q->mutex);
+ return cnt;
+}
+
+mm_camera_frame_t *mm_camera_stream_frame_deq_no_lock(mm_camera_frame_queue_t *q)
+{
+ mm_camera_frame_t *tmp;
+
+ tmp = q->head;
+
+ if(tmp == NULL) goto end;
+ if(q->head == q->tail) {
+ q->head = NULL;
+ q->tail = NULL;
+ } else {
+ q->head = tmp->next;
+ }
+ tmp->next = NULL;
+ q->cnt--;
+end:
+ return tmp;
+}
+
+void mm_camera_stream_frame_enq_no_lock(mm_camera_frame_queue_t *q, mm_camera_frame_t *node)
+{
+ node->next = NULL;
+ if(q->head == NULL) {
+ q->head = node;
+ q->tail = node;
+ } else {
+ q->tail->next = node;
+ q->tail = node;
+ }
+ q->cnt++;
+}
+
+mm_camera_frame_t *mm_camera_stream_frame_deq(mm_camera_frame_queue_t *q)
+{
+ mm_camera_frame_t *tmp;
+
+ pthread_mutex_lock(&q->mutex);
+ tmp = q->head;
+
+ if(tmp == NULL) goto end;
+ if(q->head == q->tail) {
+ q->head = NULL;
+ q->tail = NULL;
+ } else {
+ q->head = tmp->next;
+ }
+ tmp->next = NULL;
+ q->cnt--;
+end:
+ pthread_mutex_unlock(&q->mutex);
+ return tmp;
+}
+
+void mm_camera_stream_frame_enq(mm_camera_frame_queue_t *q, mm_camera_frame_t *node)
+{
+ pthread_mutex_lock(&q->mutex);
+ node->next = NULL;
+ if(q->head == NULL) {
+ q->head = node;
+ q->tail = node;
+ } else {
+ q->tail->next = node;
+ q->tail = node;
+ }
+ q->cnt++;
+ pthread_mutex_unlock(&q->mutex);
+}
+
+void mm_stream_frame_flash_q(mm_camera_frame_queue_t *q)
+{
+ pthread_mutex_lock(&q->mutex);
+ q->cnt = 0;
+ q->match_cnt = 0;
+ q->head = NULL;
+ q->tail = NULL;
+ pthread_mutex_unlock(&q->mutex);
+}
+
+void mm_camera_stream_frame_refill_q(mm_camera_frame_queue_t *q, mm_camera_frame_t *node, int num)
+{
+ int i;
+
+ mm_stream_frame_flash_q(q);
+ for(i = 0; i < num; i++)
+ mm_camera_stream_frame_enq(q, &node[i]);
+ CDBG("%s: q=0x%x, num = %d, q->cnt=%d\n",
+ __func__,(uint32_t)q,num, mm_camera_stream_frame_get_q_cnt(q));
+}
+
+void mm_camera_stream_deinit_frame(mm_camera_stream_frame_t *frame)
+{
+ pthread_mutex_destroy(&frame->mutex);
+ mm_camera_stream_deinit_q(&frame->readyq);
+ memset(frame, 0, sizeof(mm_camera_stream_frame_t));
+}
+
+void mm_camera_stream_init_frame(mm_camera_stream_frame_t *frame)
+{
+ memset(frame, 0, sizeof(mm_camera_stream_frame_t));
+ pthread_mutex_init(&frame->mutex, NULL);
+ mm_camera_stream_init_q(&frame->readyq);
+}
+
+void mm_camera_stream_release(mm_camera_stream_t *stream)
+{
+ mm_camera_stream_deinit_frame(&stream->frame);
+ if(stream->fd > 0) close(stream->fd);
+ memset(stream, 0, sizeof(*stream));
+ //stream->fd = -1;
+ mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_NOTUSED);
+}
+
+int mm_camera_stream_is_active(mm_camera_stream_t *stream)
+{
+ return (stream->state == MM_CAMERA_STREAM_STATE_ACTIVE)? TRUE : FALSE;
+}
+
+static void mm_camera_stream_util_set_state(mm_camera_stream_t *stream,
+ mm_camera_stream_state_type_t state)
+{
+ CDBG("%s:stream fd=%d, stream type=%d, cur_state=%d,new_state=%d\n",
+ __func__, stream->fd, stream->stream_type, stream->state, state);
+ stream->state = state;
+}
+
+int mm_camera_read_msm_frame(mm_camera_obj_t * my_obj,
+ mm_camera_stream_t *stream)
+{
+ int idx = -1, rc = MM_CAMERA_OK;
+ uint32_t i = 0;
+ struct v4l2_buffer vb;
+ struct v4l2_plane planes[VIDEO_MAX_PLANES];
+
+ memset(&vb, 0, sizeof(vb));
+ vb.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ vb.memory = V4L2_MEMORY_USERPTR;
+ vb.m.planes = &planes[0];
+ vb.length = stream->fmt.fmt.pix_mp.num_planes;
+
+ CDBG("%s: VIDIOC_DQBUF ioctl call\n", __func__);
+ rc = ioctl(stream->fd, VIDIOC_DQBUF, &vb);
+ if (rc < 0)
+ return idx;
+ idx = vb.index;
+ for(i = 0; i < vb.length; i++) {
+ CDBG("%s plane %d addr offset: %d data offset:%d\n",
+ __func__, i, vb.m.planes[i].reserved[0],
+ vb.m.planes[i].data_offset);
+ stream->frame.frame[idx].planes[i].reserved[0] =
+ vb.m.planes[i].reserved[0];
+ stream->frame.frame[idx].planes[i].data_offset =
+ vb.m.planes[i].data_offset;
+ }
+
+ stream->frame.frame[idx].frame.frame_id = vb.sequence;
+ stream->frame.frame[idx].frame.ts.tv_sec = vb.timestamp.tv_sec;
+ stream->frame.frame[idx].frame.ts.tv_nsec = vb.timestamp.tv_usec * 1000;
+ return idx;
+}
+
+static int mm_camera_stream_util_proc_get_crop(mm_camera_obj_t *my_obj,
+ mm_camera_stream_t *stream,
+ mm_camera_rect_t *val)
+{
+ struct v4l2_crop crop;
+ int rc = MM_CAMERA_OK;
+ memset(&crop, 0, sizeof(crop));
+ crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ rc = ioctl(stream->fd, VIDIOC_G_CROP, &crop);
+ if (rc < 0)
+ return rc;
+ val->left = crop.c.left;
+ val->top = crop.c.top;
+ val->width = crop.c.width;
+ val->height = crop.c.height;
+ return rc;
+}
+
+int32_t mm_camera_util_s_ctrl( int32_t fd, uint32_t id, int32_t value)
+{
+ int rc = MM_CAMERA_OK;
+ struct v4l2_control control;
+
+ memset(&control, 0, sizeof(control));
+ control.id = id;
+ control.value = value;
+ rc = ioctl (fd, VIDIOC_S_CTRL, &control);
+
+ if(rc) {
+ CDBG("%s: fd=%d, S_CTRL, id=0x%x, value = 0x%x, rc = %ld\n",
+ __func__, fd, id, (uint32_t)value, rc);
+ rc = MM_CAMERA_E_GENERAL;
+ }
+ return rc;
+}
+
+int32_t mm_camera_util_private_s_ctrl(int32_t fd, uint32_t id, int32_t value)
+{
+ int rc = MM_CAMERA_OK;
+ struct msm_camera_v4l2_ioctl_t v4l2_ioctl;
+
+ memset(&v4l2_ioctl, 0, sizeof(v4l2_ioctl));
+ v4l2_ioctl.id = id;
+ v4l2_ioctl.ioctl_ptr = value;
+ rc = ioctl (fd, MSM_CAM_V4L2_IOCTL_PRIVATE_S_CTRL, &v4l2_ioctl);
+
+ if(rc) {
+ CDBG_ERROR("%s: fd=%d, S_CTRL, id=0x%x, value = 0x%x, rc = %ld\n",
+ __func__, fd, id, (uint32_t)value, rc);
+ rc = MM_CAMERA_E_GENERAL;
+ }
+ return rc;
+}
+
+/* Read a V4L2 control (VIDIOC_G_CTRL) from fd into *value.
+ * Returns MM_CAMERA_OK on success, MM_CAMERA_E_GENERAL on ioctl failure.
+ * NOTE(review): *value is overwritten with control.value even when the
+ * ioctl fails - callers should check the return code before using it. */
+int32_t mm_camera_util_g_ctrl( int32_t fd, uint32_t id, int32_t *value)
+{
+    int rc = MM_CAMERA_OK;
+    struct v4l2_control control;
+
+    memset(&control, 0, sizeof(control));
+    control.id = id;
+    /* NOTE(review): the pointer itself is cast into the 32-bit control
+     * value before the query - presumably some driver-private controls
+     * read it as a user pointer; on 64-bit this truncates. Confirm. */
+    control.value = (int32_t)value;
+    rc = ioctl (fd, VIDIOC_G_CTRL, &control);
+    if(rc) {
+        CDBG("%s: fd=%d, G_CTRL, id=0x%x, rc = %d\n", __func__, fd, id, rc);
+        rc = MM_CAMERA_E_GENERAL;
+    }
+    *value = control.value;
+    return rc;
+}
+
+/* Map a camera HAL pixel format to its V4L2 fourcc and report how many
+ * planes that format carries.  Unknown formats yield fourcc 0 and
+ * *num_planes 0. */
+static uint32_t mm_camera_util_get_v4l2_fmt(cam_format_t fmt,
+                                            uint8_t *num_planes)
+{
+    uint32_t v4l2_fmt = 0;
+    uint8_t planes = 0;
+
+    switch(fmt) {
+    case CAMERA_YUV_420_NV12:
+        v4l2_fmt = V4L2_PIX_FMT_NV12;
+        planes = 2;
+        break;
+    case CAMERA_YUV_420_NV21:
+        v4l2_fmt = V4L2_PIX_FMT_NV21;
+        planes = 2;
+        break;
+    case CAMERA_BAYER_SBGGR10:
+        v4l2_fmt = V4L2_PIX_FMT_SBGGR10;
+        planes = 1;
+        break;
+    case CAMERA_YUV_422_NV61:
+        v4l2_fmt = V4L2_PIX_FMT_NV61;
+        planes = 2;
+        break;
+    case CAMERA_YUV_422_YUYV:
+        v4l2_fmt = V4L2_PIX_FMT_YUYV;
+        planes = 1;
+        break;
+    case CAMERA_YUV_420_YV12:
+        /* NOTE(review): YV12 is reported with the NV12 fourcc but three
+         * planes - looks driver-specific; confirm against the kernel. */
+        v4l2_fmt = V4L2_PIX_FMT_NV12;
+        planes = 3;
+        break;
+    default:
+        break;
+    }
+    *num_planes = planes;
+    return v4l2_fmt;
+}
+
+/* Program the MSM-specific "extended capture mode" for a stream via
+ * VIDIOC_S_PARM so the driver knows which pipeline output this fd feeds
+ * (preview/main/thumbnail/video).  VIDEO_MAIN and unknown stream types
+ * are a no-op returning 0; otherwise the raw ioctl result is returned.
+ * Fix: s_parm was handed to the kernel with every field other than
+ * .type/.extendedmode uninitialized (stack garbage); it is now zeroed. */
+static int mm_camera_stream_util_set_ext_mode(mm_camera_stream_t *stream)
+{
+    int rc = 0;
+    struct v4l2_streamparm s_parm;
+
+    memset(&s_parm, 0, sizeof(s_parm));
+    s_parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    switch(stream->stream_type) {
+    case MM_CAMERA_STREAM_PREVIEW:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_PREVIEW;
+        break;
+    case MM_CAMERA_STREAM_SNAPSHOT:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_MAIN;
+        break;
+    case MM_CAMERA_STREAM_THUMBNAIL:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_THUMBNAIL;
+        break;
+    case MM_CAMERA_STREAM_VIDEO:
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_VIDEO;
+        break;
+    case MM_CAMERA_STREAM_RAW:
+        /* RAW capture goes through the MAIN output on this driver. */
+        s_parm.parm.capture.extendedmode = MSM_V4L2_EXT_CAPTURE_MODE_MAIN; //MSM_V4L2_EXT_CAPTURE_MODE_RAW;
+        break;
+    case MM_CAMERA_STREAM_VIDEO_MAIN:
+    default:
+        return 0;
+    }
+
+    rc = ioctl(stream->fd, VIDIOC_S_PARM, &s_parm);
+    CDBG("%s:stream fd=%d,type=%d,rc=%d,extended_mode=%d\n",
+         __func__, stream->fd, stream->stream_type, rc,
+         s_parm.parm.capture.extendedmode);
+    return rc;
+}
+
+/* Set the camera operating mode via the driver-private
+ * MSM_V4L2_PID_CAM_MODE control.  Returns the raw ioctl result.
+ * Fix: s_ctrl was passed to the kernel with its remaining fields
+ * uninitialized; it is now zeroed before use. */
+static int mm_camera_util_set_op_mode(int fd, int opmode)
+{
+    int rc = 0;
+    struct v4l2_control s_ctrl;
+
+    memset(&s_ctrl, 0, sizeof(s_ctrl));
+    s_ctrl.id = MSM_V4L2_PID_CAM_MODE;
+    s_ctrl.value = opmode;
+
+    rc = ioctl(fd, VIDIOC_S_CTRL, &s_ctrl);
+    if (rc < 0)
+        CDBG("%s: VIDIOC_S_CTRL failed, rc=%d\n",
+             __func__, rc);
+    return rc;
+}
+
+/* Queue buffer 'idx' of the stream back to the kernel with VIDIOC_QBUF
+ * (multi-planar, USERPTR memory).  The plane array and plane count come
+ * from the descriptors recorded at registration time in stream->frame.
+ * Returns the raw ioctl result (<0 on failure).
+ * Fix: removed the unused locals 'i' and 'ret'. */
+int mm_camera_stream_qbuf(mm_camera_obj_t * my_obj, mm_camera_stream_t *stream,
+  int idx)
+{
+    int32_t rc = MM_CAMERA_OK;
+    struct v4l2_buffer buffer;
+
+    memset(&buffer, 0, sizeof(buffer));
+    buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    buffer.memory = V4L2_MEMORY_USERPTR;
+    buffer.index = idx;
+    buffer.m.planes = &(stream->frame.frame[idx].planes[0]);
+    buffer.length = stream->frame.frame[idx].num_planes;
+
+    CDBG("%s Ref : PREVIEW=%d VIDEO=%d SNAPSHOT=%d THUMB=%d ", __func__,
+         MM_CAMERA_STREAM_PREVIEW, MM_CAMERA_STREAM_VIDEO,
+         MM_CAMERA_STREAM_SNAPSHOT, MM_CAMERA_STREAM_THUMBNAIL);
+    CDBG("%s:fd=%d,type=%d,frame idx=%d,num planes %d\n", __func__,
+         stream->fd, stream->stream_type, idx, buffer.length);
+
+    rc = ioctl(stream->fd, VIDIOC_QBUF, &buffer);
+    if (rc < 0) {
+        CDBG_ERROR("%s: VIDIOC_QBUF error = %d, stream type=%d\n", __func__, rc, stream->stream_type);
+        return rc;
+    }
+    CDBG("%s: X idx: %d, stream_type:%d", __func__, idx, stream->stream_type);
+    return rc;
+}
+
+/* Tell the kernel how many USERPTR buffers will later be registered for
+ * this stream (VIDIOC_REQBUFS).  Counts above MM_CAMERA_MAX_NUM_FRAMES
+ * are rejected with -MM_CAMERA_E_GENERAL; otherwise the raw ioctl
+ * result is returned. */
+static int mm_camera_stream_util_request_buf(mm_camera_obj_t * my_obj,
+                      mm_camera_stream_t *stream,
+                      int8_t buf_num)
+{
+    struct v4l2_requestbuffers bufreq;
+    int32_t rc;
+
+    if(buf_num > MM_CAMERA_MAX_NUM_FRAMES) {
+        CDBG("%s: buf num %d > max limit %d\n",
+             __func__, buf_num, MM_CAMERA_MAX_NUM_FRAMES);
+        return -MM_CAMERA_E_GENERAL;
+    }
+
+    memset(&bufreq, 0, sizeof(bufreq));
+    bufreq.count  = buf_num;
+    bufreq.type   = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(stream->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+        CDBG("%s: fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d\n",
+             __func__, stream->fd, rc);
+        return rc;
+    }
+
+    ALOGV("%s: stream fd=%d, ioctl VIDIOC_REQBUFS: memtype = %d, num_frames = %d, rc=%d\n",
+          __func__, stream->fd, bufreq.memory, bufreq.count, rc);
+    return rc;
+}
+
+/* Re-queue already-registered buffers (e.g. handed back by the display)
+ * to the kernel.  Unlike .._reg_buf() this does not issue
+ * VIDIOC_REQBUFS; it only QBUFs each entry listed in vbuf and resets
+ * its ref count.
+ * NOTE(review): entries with idx >= MM_CAMERA_MAX_NUM_FRAMES are
+ * silently skipped - presumably never produced upstream; confirm.
+ * Fixes: removed the unused local 'j'; the per-buffer trace logged the
+ * fd of loop position 'i' while labelling it with buffer index 'idx' -
+ * it now consistently reports buffer 'idx'. */
+static int mm_camera_stream_util_enqueue_buf(mm_camera_obj_t * my_obj,
+                      mm_camera_stream_t *stream,
+                      mm_camera_buf_def_t *vbuf)
+{
+    int32_t i, rc = MM_CAMERA_OK;
+
+    if(vbuf->num > MM_CAMERA_MAX_NUM_FRAMES) {
+        rc = -MM_CAMERA_E_GENERAL;
+        CDBG("%s: buf num %d > max limit %d\n",
+             __func__, vbuf->num, MM_CAMERA_MAX_NUM_FRAMES);
+        goto end;
+    }
+
+    for(i = 0; i < vbuf->num; i++){
+        int idx = vbuf->buf.mp[i].idx;
+        ALOGV("%s: enqueue buf index = %d\n",__func__, idx);
+        if(idx < MM_CAMERA_MAX_NUM_FRAMES) {
+            ALOGV("%s: stream_fd = %d, frame_fd = %d, frame ID = %d, offset = %d\n",
+                  __func__, stream->fd, stream->frame.frame[idx].frame.fd,
+                  idx, stream->frame.frame_offset[idx]);
+            rc = mm_camera_stream_qbuf(my_obj, stream, stream->frame.frame[idx].idx);
+            if (rc < 0) {
+                CDBG("%s: VIDIOC_QBUF rc = %d\n", __func__, rc);
+                goto end;
+            }
+            stream->frame.ref_count[idx] = 0;
+        }
+    }
+    stream->frame.qbuf = 1;
+end:
+    return rc;
+}
+
+/* Register the caller-supplied frame buffers with the driver for this
+ * stream: validate the count, map the stream type to an output image
+ * type, compute the per-format frame length, issue VIDIOC_REQBUFS
+ * (USERPTR), then copy every buffer descriptor into stream->frame and
+ * queue it with VIDIOC_QBUF.  On success stream->frame.qbuf is set so
+ * STREAM_ON will not re-queue.  Returns MM_CAMERA_OK, a negative
+ * mm-camera error, or a raw ioctl rc. */
+static int mm_camera_stream_util_reg_buf(mm_camera_obj_t * my_obj,
+                      mm_camera_stream_t *stream,
+                      mm_camera_buf_def_t *vbuf)
+{
+    int32_t i, rc = MM_CAMERA_OK, j;
+    int *ret;  /* NOTE(review): unused */
+    struct v4l2_requestbuffers bufreq;
+    int image_type;
+    uint8_t num_planes;
+    uint32_t planes[VIDEO_MAX_PLANES];
+
+    /* Reject buffer counts beyond the fixed per-stream frame table. */
+    if(vbuf->num > MM_CAMERA_MAX_NUM_FRAMES) {
+        rc = -MM_CAMERA_E_GENERAL;
+        CDBG_ERROR("%s: buf num %d > max limit %d\n",
+             __func__, vbuf->num, MM_CAMERA_MAX_NUM_FRAMES);
+        goto end;
+    }
+    /* Map the stream type to the image-type code used by the frame
+     * length calculation (RAW shares the snapshot path). */
+    switch(stream->stream_type) {
+    case MM_CAMERA_STREAM_PREVIEW:
+      image_type = OUTPUT_TYPE_P;
+      break;
+    case MM_CAMERA_STREAM_SNAPSHOT:
+    case MM_CAMERA_STREAM_RAW:
+      image_type = OUTPUT_TYPE_S;
+      break;
+    case MM_CAMERA_STREAM_THUMBNAIL:
+      image_type = OUTPUT_TYPE_T;
+      break;
+    case MM_CAMERA_STREAM_VIDEO:
+    default:
+      image_type = OUTPUT_TYPE_V;
+      break;
+    }
+    /* Frame length / plane sizes depend on format, mode and dimensions. */
+    stream->frame.frame_len = mm_camera_get_msm_frame_len(stream->cam_fmt,
+                              my_obj->current_mode,
+                              stream->fmt.fmt.pix.width,
+                              stream->fmt.fmt.pix.height,
+                              image_type, &num_planes, planes);
+    if(stream->frame.frame_len == 0) {
+        CDBG_ERROR("%s:incorrect frame size = %d\n", __func__, stream->frame.frame_len);
+        rc = -1;
+        goto end;
+    }
+    stream->frame.num_frame = vbuf->num;
+    /* NOTE(review): bufreq is not zeroed here (unlike .._request_buf);
+     * reserved fields carry stack garbage - confirm the driver ignores
+     * them. */
+    bufreq.count = stream->frame.num_frame;
+    bufreq.type  = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    CDBG("%s: calling VIDIOC_REQBUFS - fd=%d, num_buf=%d, type=%d, memory=%d\n",
+         __func__,stream->fd, bufreq.count, bufreq.type, bufreq.memory);
+    rc = ioctl(stream->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+        CDBG_ERROR("%s: fd=%d, ioctl VIDIOC_REQBUFS failed: rc=%d\n",
+             __func__, stream->fd, rc);
+        goto end;
+    }
+    CDBG("%s: stream fd=%d, ioctl VIDIOC_REQBUFS: memtype = %d,"
+         "num_frames = %d, rc=%d\n", __func__, stream->fd, bufreq.memory,
+         bufreq.count, rc);
+
+    /* Record each buffer descriptor, then queue it to the driver. */
+    for(i = 0; i < vbuf->num; i++){
+        vbuf->buf.mp[i].idx = i; /* remember the index to stream frame if first time qbuf */
+        memcpy(&stream->frame.frame[i].frame, &(vbuf->buf.mp[i].frame),
+               sizeof(vbuf->buf.mp[i].frame));
+        stream->frame.frame[i].idx = i;
+        stream->frame.frame[i].num_planes = vbuf->buf.mp[i].num_planes;
+        for(j = 0; j < vbuf->buf.mp[i].num_planes; j++) {
+            stream->frame.frame[i].planes[j] = vbuf->buf.mp[i].planes[j];
+        }
+
+        if(vbuf->buf.mp[i].frame_offset) {
+            stream->frame.frame_offset[i] = vbuf->buf.mp[i].frame_offset;
+        } else {
+            stream->frame.frame_offset[i] = 0;
+        }
+
+        rc = mm_camera_stream_qbuf(my_obj, stream, stream->frame.frame[i].idx);
+        if (rc < 0) {
+            CDBG_ERROR("%s: VIDIOC_QBUF rc = %d\n", __func__, rc);
+            goto end;
+        }
+        stream->frame.ref_count[i] = 0;
+        CDBG("%s: stream_fd = %d, frame_fd = %d, frame ID = %d, offset = %d\n",
+             __func__, stream->fd, stream->frame.frame[i].frame.fd,
+             i, stream->frame.frame_offset[i]);
+    }
+    stream->frame.qbuf = 1;
+end:
+    return rc;
+}
+/* Release all buffers back from the driver by issuing VIDIOC_REQBUFS
+ * with count 0, then flush the ready-frame queue and clear the per-
+ * buffer ref counts.  Returns the raw ioctl result.
+ * Fixes: bufreq was only partially initialized before being handed to
+ * the kernel - it is now zeroed first (matching .._request_buf); the
+ * unused local 'i' was removed. */
+static int mm_camera_stream_util_unreg_buf(mm_camera_obj_t * my_obj,
+                          mm_camera_stream_t *stream)
+{
+    struct v4l2_requestbuffers bufreq;
+    int32_t rc = MM_CAMERA_OK;
+
+    memset(&bufreq, 0, sizeof(bufreq));
+    bufreq.count = 0;
+    bufreq.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    bufreq.memory = V4L2_MEMORY_USERPTR;
+    rc = ioctl(stream->fd, VIDIOC_REQBUFS, &bufreq);
+    if (rc < 0) {
+        CDBG_ERROR("%s: fd=%d, VIDIOC_REQBUFS failed, rc=%d\n",
+                   __func__, stream->fd, rc);
+        return rc;
+    }
+    /* "flash" is the project's spelling; this flushes the ready queue. */
+    mm_stream_frame_flash_q(&stream->frame.readyq);
+    memset(stream->frame.ref_count,0,(stream->frame.num_frame * sizeof(int8_t)));
+    stream->frame.qbuf = 0;
+    CDBG("%s:fd=%d,type=%d,rc=%d\n", __func__, stream->fd,
+         stream->stream_type, rc);
+    return rc;
+}
+
+/* State handler for MM_CAMERA_STREAM_STATE_NOTUSED.  The only legal
+ * event is ACQUIRE: open the camera device node, record the stream
+ * type, program the extended capture mode, init the frame bookkeeping
+ * and move the stream to ACQUIRED.  Any other event returns -1. */
+static int32_t mm_camera_stream_fsm_notused(mm_camera_obj_t * my_obj,
+                           mm_camera_stream_t *stream,
+                           mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+    char dev_name[MM_CAMERA_DEV_NAME_LEN];
+
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_ACQUIRE:
+        snprintf(dev_name, sizeof(dev_name), "/dev/%s", mm_camera_util_get_dev_name(my_obj));
+        CDBG("%s: open dev '%s', stream type = %d\n",
+             __func__, dev_name, *((mm_camera_stream_type_t *)val));
+        /* NOTE(review): fd 0 is treated as failure here even though it
+         * is a valid descriptor; consistent with the fd>0 cleanup below. */
+        stream->fd = open(dev_name, O_RDWR | O_NONBLOCK);
+        if(stream->fd <= 0){
+            CDBG("%s: open dev returned %d\n", __func__, stream->fd);
+            return -1;
+        }
+        stream->stream_type = *((mm_camera_stream_type_t *)val);
+        rc = mm_camera_stream_util_set_ext_mode(stream);
+        CDBG("%s: fd=%d, stream type=%d, mm_camera_stream_util_set_ext_mode() err=%d\n",
+             __func__, stream->fd, stream->stream_type, rc);
+        if(rc == MM_CAMERA_OK) {
+            mm_camera_stream_init_frame(&stream->frame);
+            mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_ACQUIRED);
+        } else if(stream->fd > 0) {
+            /* ext-mode setup failed: close the fd we just opened. */
+            close(stream->fd);
+            stream->fd = 0;
+        }
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+                   stream->state);
+        return -1;
+    }
+    return rc;
+}
+
+/* Negotiate the stream's pixel format with the driver: validate the
+ * requested dimensions, fill stream->fmt (multi-planar) from fmt, map
+ * the HAL format to a V4L2 fourcc/plane count, and issue VIDIOC_S_FMT.
+ * Returns MM_CAMERA_OK, -MM_CAMERA_E_INVALID_INPUT for zero dimensions,
+ * or -MM_CAMERA_E_GENERAL if S_FMT fails. */
+static int32_t mm_camera_stream_util_proc_fmt(mm_camera_obj_t *my_obj,
+                    mm_camera_stream_t *stream,
+                    mm_camera_image_fmt_t *fmt)
+{
+    int32_t rc = MM_CAMERA_OK;
+
+    if(fmt->dim.width == 0 || fmt->dim.height == 0) {
+        rc = -MM_CAMERA_E_INVALID_INPUT;
+        CDBG("%s:invalid input[w=%d,h=%d,fmt=%d]\n",
+             __func__, fmt->dim.width, fmt->dim.height, fmt->fmt);
+        goto end;
+    }
+    /* Dump every negotiated dimension set for debugging. */
+    CDBG("%s: dw=%d,dh=%d,vw=%d,vh=%d,pw=%d,ph=%d,tw=%d,th=%d,raw_w=%d,raw_h=%d,fmt=%d\n",
+         __func__,
+         my_obj->dim.display_width,my_obj->dim.display_height,
+         my_obj->dim.video_width,my_obj->dim.video_height,
+         my_obj->dim.picture_width,my_obj->dim.picture_height,
+         my_obj->dim.ui_thumbnail_width,my_obj->dim.ui_thumbnail_height,
+         my_obj->dim.raw_picture_width,my_obj->dim.raw_picture_height,fmt->fmt);
+    stream->cam_fmt = fmt->fmt;
+    stream->fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    stream->fmt.fmt.pix_mp.width = fmt->dim.width;
+    stream->fmt.fmt.pix_mp.height= fmt->dim.height;
+    stream->fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
+    /* Also fills in num_planes for the chosen format. */
+    stream->fmt.fmt.pix_mp.pixelformat =
+        mm_camera_util_get_v4l2_fmt(stream->cam_fmt,
+                                &(stream->fmt.fmt.pix_mp.num_planes));
+    rc = ioctl(stream->fd, VIDIOC_S_FMT, &stream->fmt);
+    if (rc < 0) {
+        CDBG("%s: ioctl VIDIOC_S_FMT failed: rc=%d\n", __func__, rc);
+        rc = -MM_CAMERA_E_GENERAL;
+    }
+end:
+    CDBG("%s:fd=%d,type=%d,rc=%d\n",
+         __func__, stream->fd, stream->stream_type, rc);
+    return rc;
+}
+/* State handler for MM_CAMERA_STREAM_STATE_ACQUIRED: accepts SET_FMT
+ * (advancing to CFG on success), GET_CROP and RELEASE; everything else
+ * is rejected with -1. */
+static int32_t mm_camera_stream_fsm_acquired(mm_camera_obj_t * my_obj,
+                            mm_camera_stream_t *stream,
+                            mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_SET_FMT:
+        rc = mm_camera_stream_util_proc_fmt(my_obj, stream,
+                                            (mm_camera_image_fmt_t *)val);
+        if(rc == 0)
+            mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_CFG);
+        break;
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+        rc = mm_camera_stream_util_proc_get_crop(my_obj, stream, val);
+        break;
+    case MM_CAMERA_STATE_EVT_RELEASE:
+        mm_camera_stream_release(stream);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+                   stream->state);
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+/* State handler for MM_CAMERA_STREAM_STATE_CFG (format negotiated).
+ * REG_BUF or ENQUEUE_BUF advance the stream to REG on success; SET_FMT
+ * may re-negotiate while still in CFG. */
+static int32_t mm_camera_stream_fsm_cfg(mm_camera_obj_t * my_obj,
+                       mm_camera_stream_t *stream,
+                       mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_SET_FMT:
+        rc = mm_camera_stream_util_proc_fmt(my_obj, stream,
+                                            (mm_camera_image_fmt_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_REG_BUF:
+        rc = mm_camera_stream_util_reg_buf(my_obj, stream,
+                                           (mm_camera_buf_def_t *)val);
+        if(rc == 0)
+            mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_REG);
+        break;
+    case MM_CAMERA_STATE_EVT_ENQUEUE_BUF:
+        rc = mm_camera_stream_util_enqueue_buf(my_obj, stream,
+                                               (mm_camera_buf_def_t *)val);
+        if(rc == 0)
+            mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_REG);
+        break;
+    case MM_CAMERA_STATE_EVT_REQUEST_BUF:
+        rc = mm_camera_stream_util_request_buf(my_obj, stream,
+                                               ((mm_camera_buf_def_t *)val)->num);
+        break;
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+        rc = mm_camera_stream_util_proc_get_crop(my_obj, stream, val);
+        break;
+    case MM_CAMERA_STATE_EVT_RELEASE:
+        mm_camera_stream_release(stream);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+                   stream->state);
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
+/* Release one consumer reference on frame->idx.  When the count reaches
+ * zero the buffer is handed back to the kernel via
+ * mm_camera_stream_qbuf().  Calling this on a buffer whose count is
+ * already zero is an error: the buffer is still requeued (defensively,
+ * so the driver is not starved) but -1 is returned.  Serialized by
+ * stream->frame.mutex.
+ * Fix: removed the '#if 0' block of dead, divergent refcount logic that
+ * trailed this function and only misled readers. */
+int32_t mm_camera_stream_util_buf_done(mm_camera_obj_t * my_obj,
+                      mm_camera_stream_t *stream,
+                      mm_camera_notify_frame_t *frame)
+{
+    int32_t rc = MM_CAMERA_OK;
+    pthread_mutex_lock(&stream->frame.mutex);
+
+    if(stream->frame.ref_count[frame->idx] == 0) {
+        /* Double release: requeue anyway, then report the error. */
+        rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+        CDBG_ERROR("%s: Error Trying to free second time?(idx=%d) count=%d, stream type=%d\n",
+                   __func__, frame->idx, stream->frame.ref_count[frame->idx], stream->stream_type);
+        rc = -1;
+    }else{
+        stream->frame.ref_count[frame->idx]--;
+        if(0 == stream->frame.ref_count[frame->idx]) {
+            CDBG("<DEBUG> : Buf done for buffer:%p:%d",stream,frame->idx);
+            rc = mm_camera_stream_qbuf(my_obj, stream, frame->idx);
+            if(rc < 0)
+                CDBG_ERROR("%s: mm_camera_stream_qbuf(idx=%d) err=%d\n",
+                           __func__, frame->idx, rc);
+        }else{
+            CDBG("<DEBUG> : Still ref count pending count :%d",stream->frame.ref_count[frame->idx]);
+            CDBG("<DEBUG> : for buffer:%p:%d, stream type=%d",stream,frame->idx, stream->stream_type);
+        }
+    }
+
+    pthread_mutex_unlock(&stream->frame.mutex);
+    return rc;
+}
+
+/* State handler for MM_CAMERA_STREAM_STATE_REG (buffers registered).
+ * STREAM_ON first re-queues every buffer if none are queued
+ * (frame.qbuf == 0), then issues VIDIOC_STREAMON and advances to
+ * ACTIVE.  UNREG_BUF returns the stream to CFG.  QBUF is accepted as a
+ * no-op in this state. */
+static int32_t mm_camera_stream_fsm_reg(mm_camera_obj_t * my_obj,
+                           mm_camera_stream_t *stream,
+                           mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+        rc = mm_camera_stream_util_proc_get_crop(my_obj,stream, val);
+        break;
+    case MM_CAMERA_STATE_EVT_QBUF:
+        break;
+    case MM_CAMERA_STATE_EVT_RELEASE:
+        mm_camera_stream_release(stream);
+        break;
+    case MM_CAMERA_STATE_EVT_UNREG_BUF:
+        rc = mm_camera_stream_util_unreg_buf(my_obj, stream);
+        if(!rc)
+            mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_CFG);
+        break;
+    case MM_CAMERA_STATE_EVT_STREAM_ON:
+        {
+            enum v4l2_buf_type buf_type;
+            int i = 0;
+            mm_camera_frame_t *frame;  /* NOTE(review): unused */
+            /* Buffers must be queued before STREAMON; only re-queue if a
+             * prior STREAMOFF/unreg cleared frame.qbuf. */
+            if(stream->frame.qbuf == 0) {
+                for(i = 0; i < stream->frame.num_frame; i++) {
+                    rc = mm_camera_stream_qbuf(my_obj, stream,
+                             stream->frame.frame[i].idx);
+                    if (rc < 0) {
+                        CDBG_ERROR("%s: ioctl VIDIOC_QBUF error=%d, stream->type=%d\n",
+                             __func__, rc, stream->stream_type);
+                        return rc;
+                    }
+                    stream->frame.ref_count[i] = 0;
+                }
+                stream->frame.qbuf = 1;
+            }
+            buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+            CDBG("%s: STREAMON,fd=%d,stream_type=%d\n",
+                 __func__, stream->fd, stream->stream_type);
+            rc = ioctl(stream->fd, VIDIOC_STREAMON, &buf_type);
+            if (rc < 0) {
+                CDBG_ERROR("%s: ioctl VIDIOC_STREAMON failed: rc=%d\n",
+                     __func__, rc);
+            }
+            else
+                mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_ACTIVE);
+        }
+        break;
+    case MM_CAMERA_STATE_EVT_ENQUEUE_BUF:
+        rc = mm_camera_stream_util_enqueue_buf(my_obj, stream, (mm_camera_buf_def_t *)val);
+        break;
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+                   stream->state);
+        return -1;
+    }
+    return rc;
+}
+/* State handler for MM_CAMERA_STREAM_STATE_ACTIVE (streaming).  QBUF
+ * releases a consumed frame via the ref-count path; STREAM_OFF stops
+ * the pipeline and drops the stream back to REG. */
+static int32_t mm_camera_stream_fsm_active(mm_camera_obj_t * my_obj,
+                          mm_camera_stream_t *stream,
+                          mm_camera_state_evt_type_t evt, void *val)
+{
+    int32_t rc = 0;
+
+    switch(evt) {
+    case MM_CAMERA_STATE_EVT_QBUF:
+        /* A consumer is done with a frame: decrement / requeue. */
+        rc = mm_camera_stream_util_buf_done(my_obj, stream,
+                                            (mm_camera_notify_frame_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_GET_CROP:
+        rc = mm_camera_stream_util_proc_get_crop(my_obj, stream, val);
+        break;
+    case MM_CAMERA_STATE_EVT_ENQUEUE_BUF:
+        rc = mm_camera_stream_util_enqueue_buf(my_obj, stream,
+                                               (mm_camera_buf_def_t *)val);
+        break;
+    case MM_CAMERA_STATE_EVT_RELEASE:
+        mm_camera_stream_release(stream);
+        break;
+    case MM_CAMERA_STATE_EVT_STREAM_OFF: {
+        enum v4l2_buf_type buf_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+        CDBG("%s: STREAMOFF,fd=%d,type=%d\n",
+             __func__, stream->fd, stream->stream_type);
+        rc = ioctl(stream->fd, VIDIOC_STREAMOFF, &buf_type);
+        if (rc < 0) {
+            CDBG_ERROR("%s: STREAMOFF failed: %s\n",
+                       __func__, strerror(errno));
+        } else {
+            /* Buffers must be re-queued before the next STREAMON. */
+            stream->frame.qbuf = 0;
+            mm_camera_stream_util_set_state(stream, MM_CAMERA_STREAM_STATE_REG);
+        }
+        break;
+    }
+    default:
+        CDBG_ERROR("%s: Invalid evt=%d, stream_state=%d", __func__, evt,
+                   stream->state);
+        rc = -1;
+        break;
+    }
+    return rc;
+}
+
+typedef int32_t (*mm_camera_stream_fsm_fn_t) (mm_camera_obj_t * my_obj,
+                       mm_camera_stream_t *stream,
+                       mm_camera_state_evt_type_t evt, void *val);
+
+/* Per-state event handler table, indexed by the stream's current state;
+ * order must match the mm_camera_stream_state enum (NOTUSED..ACTIVE). */
+static mm_camera_stream_fsm_fn_t mm_camera_stream_fsm_fn[MM_CAMERA_STREAM_STATE_MAX] = {
+    mm_camera_stream_fsm_notused,
+    mm_camera_stream_fsm_acquired,
+    mm_camera_stream_fsm_cfg,
+    mm_camera_stream_fsm_reg,
+    mm_camera_stream_fsm_active
+};
+/* Dispatch evt (with its payload val) to the handler for the stream's
+ * current state.
+ * Fix: the trace format string ended in "evt\n" while 'evt' was passed
+ * as an extra argument with no matching conversion - it is now printed
+ * with %d. */
+int32_t mm_camera_stream_fsm_fn_vtbl (mm_camera_obj_t * my_obj,
+                   mm_camera_stream_t *stream,
+                   mm_camera_state_evt_type_t evt, void *val)
+{
+    CDBG("%s: stream fd=%d, type = %d, state=%d, evt=%d\n",
+         __func__, stream->fd, stream->stream_type, stream->state, evt);
+    return mm_camera_stream_fsm_fn[stream->state] (my_obj, stream, evt, val);
+}
+
diff --git a/camera/mm-camera-interface/mm_jpeg_encoder.c b/camera/mm-camera-interface/mm_jpeg_encoder.c
new file mode 100644
index 0000000..9fda928
--- /dev/null
+++ b/camera/mm-camera-interface/mm_jpeg_encoder.c
@@ -0,0 +1,717 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#include <sys/time.h>
+#include <pthread.h>
+#include <semaphore.h>
+#include <errno.h>
+#include "mm_jpeg_encoder.h"
+#include "mm_camera_dbg.h"
+#include <sys/system_properties.h>
+#include "mm_camera_interface2.h"
+
+#ifdef JPG_DBG
+#undef CDBG
+ #ifdef _ANDROID_
+ #undef LOG_NIDEBUG
+ #undef LOG_TAG
+ #define LOG_NIDEBUG 0
+ #define LOG_TAG "mm-camera jpeg"
+ #include <utils/Log.h>
+ #define CDBG(fmt, args...) ALOGV(fmt, ##args)
+ #endif
+#endif
+
+#define JPEG_DEFAULT_MAINIMAGE_QUALITY 75
+#define JPEG_DEFAULT_THUMBNAIL_QUALITY 75
+
+/* Nonzero while an encode is in flight; only one snapshot is encoded
+ * at a time (see mm_jpeg_encoder_encode). */
+int is_encoding = 0;
+/* jpege_mutex guards the singleton encoder state below; jpegcb_mutex
+ * guards the callback pointers and user_data. */
+pthread_mutex_t jpege_mutex = PTHREAD_MUTEX_INITIALIZER;
+pthread_mutex_t jpegcb_mutex = PTHREAD_MUTEX_INITIALIZER;
+
+/* Singleton encoder session state (file scope by design). */
+int rc;
+jpege_src_t jpege_source;
+jpege_dst_t jpege_dest;
+jpege_cfg_t jpege_config;
+jpege_img_data_t main_img_info, tn_img_info;
+jpeg_buffer_t temp;
+jpege_obj_t jpeg_encoder;
+exif_info_obj_t exif_info;
+exif_tag_entry_t sample_tag;
+/* Used to time an encode for profiling (see #if 0 block in the event
+ * handler). */
+struct timeval tdBefore, tdAfter;
+struct timezone tz;
+static uint32_t jpegMainimageQuality = JPEG_DEFAULT_MAINIMAGE_QUALITY;
+static uint32_t jpegThumbnailQuality = JPEG_DEFAULT_THUMBNAIL_QUALITY;
+static uint32_t jpegRotation = 0;
+/* Set per-encode: nonzero when a thumbnail buffer was supplied. */
+static int8_t usethumbnail = 1;
+static int8_t use_thumbnail_padding = 0;
+/* Compile-time selection of the hardware JPEG engine. */
+#ifdef HW_ENCODE
+static uint8_t hw_encode = true;
+#else
+static uint8_t hw_encode = false;
+#endif
+/* 3D (e.g. side-by-side) capture mode state. */
+static int8_t is_3dmode = 0;
+static cam_3d_frame_format_t img_format_3d;
+/* Client callbacks; read/written only under jpegcb_mutex. */
+jpegfragment_callback_t mmcamera_jpegfragment_callback = NULL;
+jpeg_callback_t mmcamera_jpeg_callback = NULL;
+
+void* user_data = NULL;
+#define JPEGE_FRAGMENT_SIZE (64*1024)
+
+/*===========================================================================
+FUNCTION jpege_use_thumb_padding
+
+DESCRIPTION Enable or disable thumbnail padding for subsequent encodes.
+            (The header previously mislabeled this function as
+            jpege_event_handler.)
+===========================================================================*/
+inline void jpege_use_thumb_padding(uint8_t a_use_thumb_padding)
+{
+  use_thumbnail_padding = a_use_thumb_padding;
+}
+
+/* Cancel an outstanding encode: clear every registered callback (under
+ * jpegcb_mutex, so no callback can fire mid-teardown) and then tear the
+ * encoder down via mm_jpeg_encoder_join(). */
+void mm_jpeg_encoder_cancel()
+{
+  pthread_mutex_lock(&jpegcb_mutex);
+  user_data = NULL;
+  mmcamera_jpeg_callback = NULL;
+  mmcamera_jpegfragment_callback = NULL;
+  pthread_mutex_unlock(&jpegcb_mutex);
+  mm_jpeg_encoder_join();
+}
+
+/* Install the fragment/event callbacks plus the opaque user pointer
+ * handed back on every callback.  Guarded by jpegcb_mutex so a callback
+ * in flight never observes a half-updated registration. */
+void set_callbacks(
+  jpegfragment_callback_t fragcallback,
+  jpeg_callback_t eventcallback,
+  void* userdata
+){
+  pthread_mutex_lock(&jpegcb_mutex);
+  user_data = userdata;
+  mmcamera_jpegfragment_callback = fragcallback;
+  mmcamera_jpeg_callback = eventcallback;
+  pthread_mutex_unlock(&jpegcb_mutex);
+}
+
+/*===========================================================================
+FUNCTION jpege_event_handler
+
+DESCRIPTION Handler function for jpeg encoder events
+===========================================================================*/
+/* Encoder event callback registered with jpege_init().  On
+ * JPEG_EVENT_DONE it re-derives the thumbnail and main-image buffer
+ * fd/size/address (locals only - buf_size/buf_ptr are overwritten by
+ * the second lookup and otherwise unused), then forwards the event to
+ * the client callback.
+ * NOTE(review): unlike the fragment handlers, the client callback is
+ * invoked here WITHOUT jpegcb_mutex held - confirm this race with
+ * set_callbacks()/cancel() is acceptable. */
+void mm_jpege_event_handler(void *p_user_data, jpeg_event_t event, void *p_arg)
+{
+  uint32_t buf_size;
+  uint8_t *buf_ptr = NULL;
+  int mainimg_fd, thumbnail_fd;
+
+  if (event == JPEG_EVENT_DONE) {
+
+    jpeg_buffer_t thumbnail_buffer, snapshot_buffer;
+
+    thumbnail_buffer = tn_img_info.p_fragments[0].color.yuv.luma_buf;
+    thumbnail_fd = jpeg_buffer_get_pmem_fd(thumbnail_buffer);
+    jpeg_buffer_get_actual_size(thumbnail_buffer, &buf_size);
+    jpeg_buffer_get_addr(thumbnail_buffer, &buf_ptr);
+
+    snapshot_buffer = main_img_info.p_fragments[0].color.yuv.luma_buf;
+    mainimg_fd = jpeg_buffer_get_pmem_fd(snapshot_buffer);
+    jpeg_buffer_get_actual_size(snapshot_buffer, &buf_size);
+    jpeg_buffer_get_addr(snapshot_buffer, &buf_ptr);
+
+#if 0
+    gettimeofday(&tdAfter, &tz);
+    CDBG("Profiling: JPEG encoding latency %ld microseconds\n",
+    1000000 * (tdAfter.tv_sec - tdBefore.tv_sec) + tdAfter.tv_usec -
+    tdBefore.tv_usec);
+#endif
+//    mmcamera_util_profile("encoder done");
+  }
+
+  if(mmcamera_jpeg_callback)
+    mmcamera_jpeg_callback(event, user_data);
+}
+
+/*===========================================================================
+FUNCTION mm_jpege_output_produced_handler
+
+DESCRIPTION Called by the encoder whenever an output (ping-pong) buffer
+            has been filled; forwards the bytes to the registered
+            fragment callback.  jpegcb_mutex serializes against callback
+            (de)registration and against a slow consumer still handling
+            the previous fragment.
+===========================================================================*/
+void mm_jpege_output_produced_handler(void *p_user_data, void *p_arg,
+  jpeg_buffer_t buffer)
+{
+  uint32_t out_size;
+  uint8_t *out_ptr;
+
+  jpeg_buffer_get_actual_size(buffer, &out_size);
+  jpeg_buffer_get_addr(buffer, &out_ptr);
+
+  pthread_mutex_lock(&jpegcb_mutex);
+  if(mmcamera_jpegfragment_callback)
+    mmcamera_jpegfragment_callback(out_ptr, out_size, user_data);
+  pthread_mutex_unlock(&jpegcb_mutex);
+}
+
+#if !defined(_TARGET_7x2x_) && !defined(_TARGET_7x27A_)
+/*===========================================================================
+FUNCTION jpege_output_produced_handler2
+
+DESCRIPTION Handler function for when jpeg encoder has output produced.
+            Forwards the fragment to the client callback (under
+            jpegcb_mutex), then resets the buffer's size and re-enqueues
+            it with the encoder so the ping-pong scheme can continue.
+            Returns the jpeg library result of the re-enqueue path.
+===========================================================================*/
+int mm_jpege_output_produced_handler2(void *p_user_data, void *p_arg,
+  jpeg_buffer_t buffer, uint8_t last_buf_flag)
+{
+  uint32_t buf_size;
+  uint8_t *buf_ptr;
+  int rv;
+
+  /* The mutex is to prevent the very rare case where the file writing is */
+  /* so slow that the next ping-pong output is delivered before the */
+  /* current one is finished writing, in which case the writing of the new */
+  /* buffer will be performed after the first one finishes (because of the lock) */
+  jpeg_buffer_get_actual_size(buffer, &buf_size);
+  jpeg_buffer_get_addr(buffer, &buf_ptr);
+
+  pthread_mutex_lock(&jpegcb_mutex);
+  if(mmcamera_jpegfragment_callback)
+    mmcamera_jpegfragment_callback(buf_ptr, buf_size, user_data);
+  pthread_mutex_unlock(&jpegcb_mutex);
+
+  /* Hand the drained buffer back to the encoder for reuse. */
+  rv = jpeg_buffer_set_actual_size(buffer, 0);
+  if(rv == JPEGERR_SUCCESS){
+    rv = jpege_enqueue_output_buffer(
+      jpeg_encoder,
+      &buffer, 1);
+  }
+  return rv;
+}
+#endif
+
+/* Nonzero between mm_jpeg_encoder_init() and mm_jpeg_encoder_join(). */
+static int jpeg_encoder_initialized = 0;
+
+/* Enable 3D encoding mode and record the 3D frame layout to be used by
+ * the next encode.  Guarded by jpege_mutex. */
+void mm_jpeg_encoder_set_3D_info(cam_3d_frame_format_t format)
+{
+  pthread_mutex_lock(&jpege_mutex);
+  img_format_3d = format;
+  is_3dmode = 1;
+  pthread_mutex_unlock(&jpege_mutex);
+}
+
+/* Create the jpeg encoder object and register mm_jpege_event_handler as
+ * its event callback.  Also clears 3D mode.  Returns TRUE on success,
+ * FALSE if jpege_init() fails.  Guarded by jpege_mutex, which is
+ * statically initialized and shared with mm_jpeg_encoder_join(). */
+extern int8_t mm_jpeg_encoder_init()
+{
+  pthread_mutex_lock(&jpege_mutex);
+  is_3dmode = 0;
+  /* Initialize jpeg encoder */
+  rc = jpege_init(&jpeg_encoder, mm_jpege_event_handler, NULL);
+  if (rc) {
+    //CDBG("jpege_init failed: %d\n", rc);
+    pthread_mutex_unlock(&jpege_mutex);
+    return FALSE;
+  }
+
+  jpeg_encoder_initialized = 1;
+  pthread_mutex_unlock(&jpege_mutex);
+
+  return TRUE;
+}
+
+/* Tear down the encoder session: abort any in-flight encode, destroy
+ * every jpeg buffer plus the exif and encoder objects, and clear 3D
+ * mode.  Safe to call when not initialized (teardown is skipped).
+ * Fix: the original called pthread_mutex_destroy(&jpege_mutex) here
+ * while that mutex was HELD, then unlocked it below and re-locked it on
+ * the next mm_jpeg_encoder_init() - undefined behavior per POSIX.  The
+ * statically-initialized mutex must stay alive for the process
+ * lifetime, so the destroy call is removed. */
+void mm_jpeg_encoder_join(void)
+{
+  pthread_mutex_lock(&jpege_mutex);
+  if (jpeg_encoder_initialized) {
+    jpeg_encoder_initialized = 0;
+    jpege_abort(jpeg_encoder);
+    jpeg_buffer_destroy(&temp);
+    if (usethumbnail) {
+      jpeg_buffer_destroy(&tn_img_info.p_fragments[0].color.yuv.luma_buf);
+      jpeg_buffer_destroy(&tn_img_info.p_fragments[0].color.yuv.chroma_buf);
+    }
+    jpeg_buffer_destroy(&main_img_info.p_fragments[0].color.yuv.luma_buf);
+    jpeg_buffer_destroy(&main_img_info.p_fragments[0].color.yuv.chroma_buf);
+    jpeg_buffer_destroy(&jpege_dest.buffers[0]);
+    jpeg_buffer_destroy(&jpege_dest.buffers[1]);
+    exif_destroy(&exif_info);
+    jpege_destroy(&jpeg_encoder);
+  }
+  is_3dmode = 0;
+  pthread_mutex_unlock(&jpege_mutex);
+}
+/* Compute the Y and CbCr plane offsets, total buffer size and per-plane
+ * sizes required for a snapshot of width x height, for either the
+ * hardware or software encoder path.  Returns TRUE on success, FALSE on
+ * a NULL output pointer.
+ * Fix: the original NULL-checked only p_y_offset/p_cbcr_offset but
+ * unconditionally dereferences all five out-parameters; the guard now
+ * covers every one of them. */
+int8_t mm_jpeg_encoder_get_buffer_offset(uint32_t width, uint32_t height,
+  uint32_t* p_y_offset, uint32_t* p_cbcr_offset,
+  uint32_t* p_buf_size, uint8_t *num_planes,
+  uint32_t planes[])
+{
+  CDBG("jpeg_encoder_get_buffer_offset");
+  if ((NULL == p_y_offset) || (NULL == p_cbcr_offset) ||
+      (NULL == p_buf_size) || (NULL == num_planes) || (NULL == planes)) {
+    return FALSE;
+  }
+  /* Hardcode num planes and planes array for now. TBD Check if this
+   * needs to be set based on format. */
+  *num_planes = 2;
+  if (hw_encode) {
+    /* HW engine: height is padded to a multiple of 16; for 90/180
+     * degree rotation the image is placed at the END of the padded
+     * buffer, so both planes start at the padding offset. */
+    int cbcr_offset = 0;
+    uint32_t actual_size = width*height;
+    uint32_t padded_size = width * CEILING16(height);
+    *p_y_offset = 0;
+    *p_cbcr_offset = 0;
+    if ((jpegRotation == 90) || (jpegRotation == 180)) {
+      *p_y_offset = padded_size - actual_size;
+      *p_cbcr_offset = ((padded_size - actual_size) >> 1);
+    }
+    *p_buf_size = padded_size * 3/2;
+    planes[0] = width * CEILING16(height);
+    planes[1] = width * CEILING16(height)/2;
+  } else {
+    /* SW path: chroma plane starts word-aligned after the padded luma. */
+    *p_y_offset = 0;
+    *p_cbcr_offset = PAD_TO_WORD(width*CEILING16(height));
+    *p_buf_size = *p_cbcr_offset * 3/2;
+    planes[0] = PAD_TO_WORD(width*CEILING16(height));
+    planes[1] = PAD_TO_WORD(width*CEILING16(height)/2);
+  }
+  return TRUE;
+}
+
+int8_t mm_jpeg_encoder_encode(const cam_ctrl_dimension_t * dimension,
+ const uint8_t * thumbnail_buf,
+ int thumbnail_fd, uint32_t thumbnail_offset,
+ const uint8_t * snapshot_buf,
+ int snapshot_fd,
+ uint32_t snapshot_offset,
+ common_crop_t *scaling_params,
+ exif_tags_info_t *exif_data,
+ int exif_numEntries,
+ const int32_t a_cbcroffset,
+ cam_point_t* main_crop_offset,
+ cam_point_t* thumb_crop_offset)
+{
+ int buf_size = 0;
+ int ret = 0;
+ int i = 0;
+ int cbcroffset = 0;
+ int actual_size = 0, padded_size = 0;
+ usethumbnail = thumbnail_buf ? 1 : 0;
+ int w_scale_factor = (is_3dmode && img_format_3d == SIDE_BY_SIDE_FULL) ? 2 : 1;
+
+ pthread_mutex_lock(&jpege_mutex);
+ //mmcamera_util_profile("encoder configure");
+
+ /* Do not allow snapshot if the previous one is not done */
+ /* Alternately we can queue the snapshot to be done after the one in progress is completed, */
+ /* but it involves more complex logic */
+ if (is_encoding) {
+ CDBG("Previous Jpeg Encoding is not done!\n");
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+ CDBG("jpeg_encoder_encode: thumbnail_fd = %d snapshot_fd = %d usethumbnail %d\n",
+ thumbnail_fd, snapshot_fd, usethumbnail);
+
+ gettimeofday(&tdBefore, &tz);
+ /* Initialize exif info */
+ exif_init(&exif_info);
+ /* Zero out supporting structures */
+ memset(&main_img_info, 0, sizeof(jpege_img_data_t));
+ memset(&tn_img_info, 0, sizeof(jpege_img_data_t));
+ memset(&jpege_source, 0, sizeof(jpege_src_t));
+ memset(&jpege_dest, 0, sizeof(jpege_dst_t));
+
+ /* Initialize JPEG buffers */
+ jpege_dest.buffer_cnt = 2;
+ if ((rc = jpeg_buffer_init(&temp)) ||
+ (usethumbnail && (rc = jpeg_buffer_init(&tn_img_info.p_fragments[0].color.yuv.luma_buf))) ||
+ (usethumbnail && (rc = jpeg_buffer_init(&tn_img_info.p_fragments[0].color.yuv.chroma_buf))) ||
+ (rc = jpeg_buffer_init(&main_img_info.p_fragments[0].color.yuv.luma_buf)) ||
+ (rc = jpeg_buffer_init(&main_img_info.p_fragments[0].color.yuv.chroma_buf))
+ || (rc = jpeg_buffer_init(&jpege_dest.buffers[0]))
+ || (rc = jpeg_buffer_init(&jpege_dest.buffers[1]))) {
+ CDBG_ERROR("jpeg_buffer_init failed: %d\n", rc);
+ pthread_mutex_unlock(&jpege_mutex);
+ jpege_dest.buffer_cnt = 0;
+ return FALSE;
+ }
+#if !defined(_TARGET_7x2x_) && !defined(_TARGET_7x27A_)
+ jpege_dest.p_buffer = &jpege_dest.buffers[0];
+#endif
+
+#if defined(_TARGET_7x27A_)
+ /* Allocate 2 ping-pong buffers on the heap for jpeg encoder outputs */
+ if ((rc = jpeg_buffer_allocate(jpege_dest.buffers[0], JPEGE_FRAGMENT_SIZE, 1)) ||
+ (rc = jpeg_buffer_allocate(jpege_dest.buffers[1], JPEGE_FRAGMENT_SIZE, 1))) {
+ CDBG("jpeg_buffer_allocate failed: %d\n", rc);
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+#else
+ /* Allocate 2 ping-pong buffers on the heap for jpeg encoder outputs */
+ if ((rc = jpeg_buffer_allocate(jpege_dest.buffers[0], JPEGE_FRAGMENT_SIZE, 0)) ||
+ (rc = jpeg_buffer_allocate(jpege_dest.buffers[1], JPEGE_FRAGMENT_SIZE, 0))) {
+ CDBG("jpeg_buffer_allocate failed: %d\n", rc);
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+#endif
+
+
+ if (usethumbnail) {
+ tn_img_info.width = dimension->thumbnail_width * w_scale_factor;
+ tn_img_info.height = dimension->thumbnail_height;
+ buf_size = tn_img_info.width * tn_img_info.height * 2;
+ tn_img_info.fragment_cnt = 1;
+ tn_img_info.color_format = YCRCBLP_H2V2;
+ tn_img_info.p_fragments[0].width = tn_img_info.width;
+ tn_img_info.p_fragments[0].height = CEILING16(dimension->thumbnail_height);
+ jpeg_buffer_reset(tn_img_info.p_fragments[0].color.yuv.luma_buf);
+ jpeg_buffer_reset(tn_img_info.p_fragments[0].color.yuv.chroma_buf);
+
+ CDBG("%s: Thumbnail: fd: %d offset: %d Main: fd: %d offset: %d", __func__,
+ thumbnail_fd, thumbnail_offset, snapshot_fd, snapshot_offset);
+ rc = jpeg_buffer_use_external_buffer(
+ tn_img_info.p_fragments[0].color.yuv.luma_buf,
+ (uint8_t *)thumbnail_buf, buf_size,
+ thumbnail_fd);
+
+ if (rc == JPEGERR_EFAILED) {
+ CDBG_ERROR("jpeg_buffer_use_external_buffer Thumbnail pmem failed...\n");
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+
+ cbcroffset = PAD_TO_WORD(tn_img_info.width * tn_img_info.height);
+ if (hw_encode) {
+ actual_size = dimension->thumbnail_width * dimension->thumbnail_height;
+ padded_size = dimension->thumbnail_width *
+ CEILING16(dimension->thumbnail_height);
+ cbcroffset = padded_size;
+ }
+
+ // The chroma plane in YUV4:2:0 semiplanar is at the end of the luma plane,
+ // so we attach the chroma buf to the luma buffer, which we've allocated to
+ // be large enough to hold the entire YUV image.
+ //
+ jpeg_buffer_attach_existing(tn_img_info.p_fragments[0].color.yuv.chroma_buf,
+ tn_img_info.p_fragments[0].color.yuv.luma_buf,
+ cbcroffset);
+ jpeg_buffer_set_actual_size(tn_img_info.p_fragments[0].color.yuv.luma_buf,
+ tn_img_info.width * tn_img_info.height);
+ jpeg_buffer_set_actual_size(
+ tn_img_info.p_fragments[0].color.yuv.chroma_buf, tn_img_info.width *
+ tn_img_info.height / 2);
+
+ if (hw_encode) {
+ if ((jpegRotation == 90) || (jpegRotation == 180)) {
+ jpeg_buffer_set_start_offset(tn_img_info.p_fragments[0].color.yuv.luma_buf, (padded_size - actual_size));
+ jpeg_buffer_set_start_offset(tn_img_info.p_fragments[0].color.yuv.chroma_buf, ((padded_size - actual_size) >> 1));
+ }
+ }
+ }
+
+ /* Set phy offset */
+ jpeg_buffer_set_phy_offset(tn_img_info.p_fragments[0].color.yuv.luma_buf, thumbnail_offset);
+
+ CDBG("jpeg_encoder_encode size %dx%d\n",dimension->orig_picture_dx,dimension->orig_picture_dy);
+ main_img_info.width = dimension->orig_picture_dx * w_scale_factor;
+ main_img_info.height = dimension->orig_picture_dy;
+ buf_size = main_img_info.width * main_img_info.height * 2;
+ main_img_info.fragment_cnt = 1;
+ main_img_info.color_format = YCRCBLP_H2V2;
+ main_img_info.p_fragments[0].width = main_img_info.width;
+ main_img_info.p_fragments[0].height = CEILING16(main_img_info.height);
+ jpeg_buffer_reset(main_img_info.p_fragments[0].color.yuv.luma_buf);
+ jpeg_buffer_reset(main_img_info.p_fragments[0].color.yuv.chroma_buf);
+
+ rc =
+ jpeg_buffer_use_external_buffer(
+ main_img_info.p_fragments[0].color.yuv.luma_buf,
+ (uint8_t *)snapshot_buf, buf_size,
+ snapshot_fd);
+
+ if (rc == JPEGERR_EFAILED) {
+ CDBG("jpeg_buffer_use_external_buffer Snapshot pmem failed...\n");
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+ cbcroffset = PAD_TO_WORD(main_img_info.width * CEILING16(main_img_info.height));
+ actual_size = 0;
+ padded_size = 0;
+ if (a_cbcroffset >= 0) {
+ cbcroffset = a_cbcroffset;
+ } else {
+ if (hw_encode) {
+ actual_size = dimension->orig_picture_dx * dimension->orig_picture_dy;
+ padded_size = dimension->orig_picture_dx * CEILING16(dimension->orig_picture_dy);
+ cbcroffset = padded_size;
+ }
+ }
+
+ CDBG("jpeg_encoder_encode: cbcroffset %d",cbcroffset);
+ jpeg_buffer_attach_existing(main_img_info.p_fragments[0].color.yuv.chroma_buf,
+ main_img_info.p_fragments[0].color.yuv.luma_buf,
+ cbcroffset);
+ jpeg_buffer_set_actual_size(main_img_info.p_fragments[0].color.yuv.luma_buf, main_img_info.width * main_img_info.height);
+ jpeg_buffer_set_actual_size(main_img_info.p_fragments[0].color.yuv.chroma_buf, main_img_info.width * main_img_info.height / 2);
+
+ if (hw_encode) {
+ if ((jpegRotation == 90) || (jpegRotation == 180)) {
+ jpeg_buffer_set_start_offset(main_img_info.p_fragments[0].color.yuv.luma_buf, (padded_size - actual_size));
+ jpeg_buffer_set_start_offset(main_img_info.p_fragments[0].color.yuv.chroma_buf, ((padded_size - actual_size) >> 1));
+ }
+ }
+
+ jpeg_buffer_set_phy_offset(main_img_info.p_fragments[0].color.yuv.luma_buf, snapshot_offset);
+
+ /* Set Source */
+ jpege_source.p_main = &main_img_info;
+ if (usethumbnail) {
+ jpege_source.p_thumbnail = &tn_img_info;
+ CDBG("fragment_cnt: thumb %d \n", jpege_source.p_thumbnail->fragment_cnt);
+ }
+
+ CDBG("fragment_cnt: main %d \n", jpege_source.p_main->fragment_cnt);
+
+ rc = jpege_set_source(jpeg_encoder, &jpege_source);
+ if (rc) {
+ CDBG("jpege_set_source failed: %d\n", rc);
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+
+#if defined(_TARGET_7x2x_) || defined(_TARGET_7x27A_)
+ jpege_dest.p_output_handler = (jpege_output_handler_t) mm_jpege_output_produced_handler;
+#else
+ jpege_dest.p_output_handler = mm_jpege_output_produced_handler2;
+#endif
+
+ jpege_dest.buffer_cnt = 2;
+ rc = jpege_set_destination(jpeg_encoder, &jpege_dest);
+ if (rc) {
+ CDBG("jpege_set_desination failed: %d\n", rc);
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+ /* Get default configuration */
+ jpege_get_default_config(&jpege_config);
+ jpege_config.thumbnail_present = usethumbnail;
+ if(hw_encode)
+ jpege_config.preference = JPEG_ENCODER_PREF_HW_ACCELERATED_PREFERRED;
+ else
+ jpege_config.preference = JPEG_ENCODER_PREF_SOFTWARE_ONLY;
+
+ CDBG("%s: preference %d ", __func__, jpege_config.preference);
+ jpege_config.main_cfg.quality = jpegMainimageQuality;
+ jpege_config.thumbnail_cfg.quality = jpegThumbnailQuality;
+
+ CDBG("Scaling params thumb in1_w %d in1_h %d out1_w %d out1_h %d "
+ "main_img in2_w %d in2_h %d out2_w %d out2_h %d\n",
+ scaling_params->in1_w, scaling_params->in1_h,
+ scaling_params->out1_w, scaling_params->out1_h,
+ scaling_params->in2_w, scaling_params->in2_h,
+ scaling_params->out2_w, scaling_params->out2_h);
+
+ if(scaling_params->in2_w && scaling_params->in2_h) {
+
+ if(jpegRotation)
+ jpege_config.preference = JPEG_ENCODER_PREF_SOFTWARE_ONLY;
+
+ /* Scaler information for main image */
+ jpege_config.main_cfg.scale_cfg.enable = TRUE;
+
+ jpege_config.main_cfg.scale_cfg.input_width = CEILING2(scaling_params->in2_w);
+ jpege_config.main_cfg.scale_cfg.input_height = CEILING2(scaling_params->in2_h);
+
+ if (main_crop_offset) {
+ jpege_config.main_cfg.scale_cfg.h_offset = main_crop_offset->x;
+ jpege_config.main_cfg.scale_cfg.v_offset = main_crop_offset->y;
+ } else {
+ jpege_config.main_cfg.scale_cfg.h_offset = 0;
+ jpege_config.main_cfg.scale_cfg.v_offset = 0;
+ }
+
+ jpege_config.main_cfg.scale_cfg.output_width = scaling_params->out2_w;
+ jpege_config.main_cfg.scale_cfg.output_height = scaling_params->out2_h;
+ } else {
+ CDBG("There is no scaling information for JPEG main image scaling.");
+ }
+
+ if(scaling_params->in1_w && scaling_params->in1_h) {
+ /* Scaler information for thumbnail */
+ jpege_config.thumbnail_cfg.scale_cfg.enable = TRUE;
+
+ jpege_config.thumbnail_cfg.scale_cfg.input_width = CEILING2(scaling_params->in1_w);
+ jpege_config.thumbnail_cfg.scale_cfg.input_height = CEILING2(scaling_params->in1_h);
+
+ if (thumb_crop_offset) {
+ jpege_config.thumbnail_cfg.scale_cfg.h_offset = thumb_crop_offset->x;
+ jpege_config.thumbnail_cfg.scale_cfg.v_offset = thumb_crop_offset->y;
+ } else {
+ jpege_config.thumbnail_cfg.scale_cfg.h_offset = 0;
+ jpege_config.thumbnail_cfg.scale_cfg.v_offset = 0;
+ }
+
+ jpege_config.thumbnail_cfg.scale_cfg.output_width = scaling_params->out1_w;
+ jpege_config.thumbnail_cfg.scale_cfg.output_height = scaling_params->out1_h;
+ } else {
+ CDBG("There is no scaling information for JPEG thumbnail upscaling.");
+ }
+
+ /* Set rotation based on the mode selected */
+ CDBG(" Setting Jpeg Rotation mode to %d ", jpegRotation );
+ jpege_config.main_cfg.rotation_degree_clk = jpegRotation;
+ jpege_config.thumbnail_cfg.rotation_degree_clk = jpegRotation;
+
+ if( exif_data != NULL) {
+ for(i = 0; i < exif_numEntries; i++) {
+ rc = exif_set_tag(exif_info, exif_data[i].tag_id,
+ &(exif_data[i].tag_entry));
+ if (rc) {
+ CDBG("exif_set_tag failed: %d\n", rc);
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+ }
+ }
+
+#if 0 /* Enable when JPS/MPO is ready */
+ /* 3D config */
+ CDBG("%s: is_3dmode %d ", __func__, is_3dmode );
+ if (is_3dmode) {
+ jps_cfg_3d_t cfg_3d;
+ if (jpege_config.main_cfg.scale_cfg.enable ||
+ (jpege_config.main_cfg.rotation_degree_clk > 0)) {
+ CDBG("%s: img_format_3d %d ", __func__, img_format_3d );
+ return FALSE;
+ }
+ CDBG("%s: img_format_3d %d ", __func__, img_format_3d );
+
+ switch (img_format_3d) {
+ case TOP_DOWN_HALF:
+ cfg_3d.layout = OVER_UNDER;
+ cfg_3d.width_flag = FULL_WIDTH;
+ cfg_3d.height_flag = HALF_HEIGHT;
+ cfg_3d.field_order = LEFT_FIELD_FIRST;
+ cfg_3d.separation = 0;
+ break;
+ case TOP_DOWN_FULL:
+ cfg_3d.layout = OVER_UNDER;
+ cfg_3d.width_flag = FULL_WIDTH;
+ cfg_3d.height_flag = FULL_HEIGHT;
+ cfg_3d.field_order = LEFT_FIELD_FIRST;
+ cfg_3d.separation = 0;
+ break;
+ case SIDE_BY_SIDE_HALF:
+ cfg_3d.layout = SIDE_BY_SIDE;
+ cfg_3d.width_flag = HALF_WIDTH;
+ cfg_3d.height_flag = FULL_HEIGHT;
+ cfg_3d.field_order = LEFT_FIELD_FIRST;
+ cfg_3d.separation = 0;
+ break;
+ default:
+ case SIDE_BY_SIDE_FULL:
+ cfg_3d.layout = SIDE_BY_SIDE;
+ cfg_3d.width_flag = FULL_WIDTH;
+ cfg_3d.height_flag = FULL_HEIGHT;
+ cfg_3d.field_order = LEFT_FIELD_FIRST;
+ cfg_3d.separation = 0;
+ break;
+ }
+
+ rc = jpse_config_3d(jpeg_encoder, cfg_3d);
+ if (rc) {
+ CDBG_ERROR("%s: jpse_config_3d failed: %d\n", __func__, rc);
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+ }
+#endif
+
+ /* Start encoder */
+/*
+ if( jpege_config.main_cfg.scale_cfg.enable) {
+ mmcamera_util_profile("SW encoder starting encoding");
+ } else {
+ mmcamera_util_profile("HW encoder starting encoding");
+ }
+*/
+ rc = jpege_start(jpeg_encoder, &jpege_config, &exif_info);
+ if (rc) {
+ CDBG("jpege_start failed: %d\n", rc);
+ pthread_mutex_unlock(&jpege_mutex);
+ return FALSE;
+ }
+
+ pthread_mutex_unlock(&jpege_mutex);
+ return TRUE;
+}
+
+int8_t mm_jpeg_encoder_setMainImageQuality(uint32_t quality)
+{
+ pthread_mutex_lock(&jpege_mutex);
+ CDBG(" jpeg_encoder_setMainImageQuality current main inage quality %d ," \
+ " new quality : %d\n", jpegMainimageQuality, quality);
+ if (quality <= 100)
+ jpegMainimageQuality = quality;
+ pthread_mutex_unlock(&jpege_mutex);
+ return TRUE;
+}
+
+int8_t mm_jpeg_encoder_setThumbnailQuality(uint32_t quality)
+{
+ pthread_mutex_lock(&jpege_mutex);
+ CDBG(" jpeg_encoder_setThumbnailQuality current thumbnail quality %d ," \
+ " new quality : %d\n", jpegThumbnailQuality, quality);
+ if (quality <= 100)
+ jpegThumbnailQuality = quality;
+ pthread_mutex_unlock(&jpege_mutex);
+ return TRUE;
+}
+
+int8_t mm_jpeg_encoder_setRotation(int rotation)
+{
+ pthread_mutex_lock(&jpege_mutex);
+ /* Set rotation configuration */
+ switch(rotation)
+ {
+ case 0:
+ case 90:
+ case 180:
+ case 270:
+ jpegRotation = rotation;
+ break;
+ default:
+ /* Invalid rotation mode, set to default */
+ CDBG(" Setting Default rotation mode ");
+ jpegRotation = 0;
+ break;
+ }
+ pthread_mutex_unlock(&jpege_mutex);
+ return TRUE;
+}
diff --git a/camera/mm-camera-interface/mm_jpeg_encoder.h b/camera/mm-camera-interface/mm_jpeg_encoder.h
new file mode 100644
index 0000000..cca0f66
--- /dev/null
+++ b/camera/mm-camera-interface/mm_jpeg_encoder.h
@@ -0,0 +1,75 @@
+/*
+Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+ * Redistributions of source code must retain the above copyright
+ notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+ copyright notice, this list of conditions and the following
+ disclaimer in the documentation and/or other materials provided
+ with the distribution.
+ * Neither the name of The Linux Foundation nor the names of its
+ contributors may be used to endorse or promote products derived
+ from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+*/
+
+#ifndef MM_JPEG_ENCODER_H
+#define MM_JPEG_ENCODER_H
+#include <linux/msm_ion.h>
+#include "jpege.h"
+#include "exif.h"
+#include "camera_defs_i.h"
+
+extern void mm_jpege_event_handler(void*, jpeg_event_t event, void *p_arg);
+
+extern void mm_jpege_output_produced_handler(void*, void *, jpeg_buffer_t);
+extern int mm_jpege_output_produced_handler2(void*, void *, jpeg_buffer_t, uint8_t);
+
+int8_t mm_jpeg_encoder_init(void);
+extern int8_t mm_jpeg_encoder_encode(const cam_ctrl_dimension_t * dimension,
+ const uint8_t * thumbnail_buf,
+ int thumbnail_fd, uint32_t thumbnail_offset,
+ const uint8_t * snapshot_buf,
+ int snapshot_fd,
+ uint32_t snapshot_offset,
+ common_crop_t *crop,
+ exif_tags_info_t *exif_data,
+ int exif_numEntries,
+ const int32_t a_cbcroffset,
+ cam_point_t* main_crop_offset,
+ cam_point_t* thumb_crop_offset);
+
+extern int8_t mm_jpeg_encoder_setMainImageQuality(uint32_t quality);
+extern int8_t mm_jpeg_encoder_setThumbnailQuality(uint32_t quality);
+extern int8_t mm_jpeg_encoder_setRotation(int rotation);
+extern void mm_jpeg_encoder_join(void);
+extern int8_t mm_jpeg_encoder_get_buffer_offset(uint32_t width, uint32_t height, uint32_t* p_y_offset,
+ uint32_t* p_cbcr_offset, uint32_t* p_buf_size, uint8_t *num_planes, uint32_t planes[]);
+extern void mm_jpeg_encoder_set_3D_info(cam_3d_frame_format_t format);
+typedef void (*jpegfragment_callback_t)(uint8_t * buff_ptr,
+ uint32_t buff_size,
+ void* user_data);
+typedef void (*jpeg_callback_t)(jpeg_event_t, void *);
+
+extern void set_callbacks(
+ jpegfragment_callback_t fragcallback,
+ jpeg_callback_t eventcallback,
+ void* userdata
+);
+
+extern void mm_jpeg_encoder_cancel();
+#endif //MMCAMERA_JPEG_ENCODER_H
diff --git a/camera/mm-camera-interface/mm_omx_jpeg_encoder.c b/camera/mm-camera-interface/mm_omx_jpeg_encoder.c
new file mode 100644
index 0000000..4c8e7e8
--- /dev/null
+++ b/camera/mm-camera-interface/mm_omx_jpeg_encoder.c
@@ -0,0 +1,806 @@
+/* Copyright (c) 2012, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#include <sys/types.h>
+#include <stdbool.h>
+#include <fcntl.h>
+#include <dlfcn.h>
+#include "OMX_Types.h"
+#include "OMX_Index.h"
+#include "OMX_Core.h"
+#include "OMX_Component.h"
+#include "omx_debug.h"
+#include "omx_jpeg_ext.h"
+#include "mm_omx_jpeg_encoder.h"
+
/* File-scope state for the OMX JPEG encoder wrapper. */
static uint8_t hw_encode = true;         /* prefer HW path; cleared when SW-only is forced */
static int jpegRotation = 0;             /* 0/90/180/270 */
static int isZSLMode = 0;
static int jpegThumbnailQuality = 75;    /* default thumbnail quality */
static int jpegMainimageQuality = 85;    /* default main-image quality */
static uint32_t phy_offset;
void *user_data;                         /* opaque pointer handed back in callbacks */

static int encoding = 0;                 /* nonzero while an encode is in flight */
pthread_mutex_t jpege_mutex = PTHREAD_MUTEX_INITIALIZER;   /* guards encoder config/state */
pthread_mutex_t jpegcb_mutex = PTHREAD_MUTEX_INITIALIZER;  /* guards callback registration */

jpegfragment_callback_t mmcamera_jpegfragment_callback;
jpeg_callback_t mmcamera_jpeg_callback;


/* OMX port indices of the encoder component. */
#define INPUT_PORT 0
#define OUTPUT_PORT 1
#define INPUT_PORT1 2
#define DEFAULT_COLOR_FORMAT YCRCBLP_H2V2

/* Maps an ISP (camera) pixel format to the encoder's color format. */
typedef struct {
  cam_format_t isp_format;
  jpeg_color_format_t jpg_format;
} color_format_map_t;


static const color_format_map_t color_format_map[] = {
  {CAMERA_YUV_420_NV21, YCRCBLP_H2V2}, /*default*/
  {CAMERA_YUV_420_NV21_ADRENO, YCRCBLP_H2V2},
  {CAMERA_YUV_420_NV12, YCBCRLP_H2V2},
  {CAMERA_YUV_420_YV12, YCBCRLP_H2V2},
  {CAMERA_YUV_422_NV61, YCRCBLP_H2V1},
  {CAMERA_YUV_422_NV16, YCBCRLP_H2V1},
};

/* OMX component handle, callback table and per-port configuration. */
static OMX_HANDLETYPE pHandle;
static OMX_CALLBACKTYPE callbacks;
static OMX_INDEXTYPE type;
static OMX_CONFIG_ROTATIONTYPE rotType;
static omx_jpeg_thumbnail thumbnail;
static OMX_CONFIG_RECTTYPE recttype;
static OMX_PARAM_PORTDEFINITIONTYPE * inputPort;    /* main image input */
static OMX_PARAM_PORTDEFINITIONTYPE * outputPort;   /* encoded bitstream output */
static OMX_PARAM_PORTDEFINITIONTYPE * inputPort1;   /* thumbnail input */
static OMX_BUFFERHEADERTYPE* pInBuffers;
static OMX_BUFFERHEADERTYPE* pOutBuffers;
static OMX_BUFFERHEADERTYPE* pInBuffers1;
OMX_INDEXTYPE user_preferences;
omx_jpeg_user_preferences userpreferences;
OMX_INDEXTYPE exif;
static omx_jpeg_exif_info_tag tag;

/* libmmstillomx is loaded lazily via dlopen; entry points resolved below. */
static void *libmmstillomx;
OMX_ERRORTYPE OMX_APIENTRY (*pOMX_GetHandle)(
  OMX_OUT OMX_HANDLETYPE* pHandle,
  OMX_IN OMX_STRING cComponentName,
  OMX_IN OMX_PTR pAppData,
  OMX_IN OMX_CALLBACKTYPE* pCallBacks);
OMX_ERRORTYPE OMX_APIENTRY (*pOMX_Init)(void);
OMX_ERRORTYPE OMX_APIENTRY (*pOMX_Deinit)(void);

/* Event rendezvous used by waitForEvent()/the OMX callbacks. */
static pthread_mutex_t lock;
static pthread_cond_t cond;
static int expectedEvent = 0;
static int expectedValue1 = 0;
static int expectedValue2 = 0;
static omx_jpeg_pmem_info pmem_info;    /* main image pmem/ion descriptor */
static omx_jpeg_pmem_info pmem_info1;   /* thumbnail pmem/ion descriptor */
static OMX_IMAGE_PARAM_QFACTORTYPE qFactor;
static omx_jpeg_thumbnail_quality thumbnailQuality;
static OMX_INDEXTYPE thumbnailQualityType;
static void *out_buffer;                /* caller-owned output buffer (set_callbacks) */
static int * out_buffer_size;           /* receives encoded size on FTB done */
static OMX_INDEXTYPE buffer_offset;
static omx_jpeg_buffer_offset bufferoffset;
+
+static jpeg_color_format_t get_jpeg_format_from_cam_format(
+ cam_format_t cam_format )
+{
+ jpeg_color_format_t jpg_format = DEFAULT_COLOR_FORMAT;
+ int i, j;
+ j = sizeof (color_format_map) / sizeof(color_format_map_t);
+ ALOGV("%s: j =%d, cam_format =%d", __func__, j, cam_format);
+ for(i =0; i< j; i++) {
+ if (color_format_map[i].isp_format == cam_format){
+ jpg_format = color_format_map[i].jpg_format;
+ break;
+ }
+ }
+ ALOGV("%s x: i =%d, jpg_format=%d", __func__, i, jpg_format);
+
+ return jpg_format;
+}
+static omx_jpeg_buffer_offset bufferoffset1;
+void set_callbacks(
+ jpegfragment_callback_t fragcallback,
+ jpeg_callback_t eventcallback, void* userdata,
+ void* output_buffer,
+ int * outBufferSize) {
+ pthread_mutex_lock(&jpegcb_mutex);
+ mmcamera_jpegfragment_callback = fragcallback;
+ mmcamera_jpeg_callback = eventcallback;
+ user_data = userdata;
+ out_buffer = output_buffer;
+ out_buffer_size = outBufferSize;
+ pthread_mutex_unlock(&jpegcb_mutex);
+}
+
+
+OMX_ERRORTYPE etbdone(OMX_OUT OMX_HANDLETYPE hComponent,
+ OMX_OUT OMX_PTR pAppData,
+ OMX_OUT OMX_BUFFERHEADERTYPE* pBuffer)
+{
+ pthread_mutex_lock(&lock);
+ expectedEvent = OMX_EVENT_ETB_DONE;
+ expectedValue1 = 0;
+ expectedValue2 = 0;
+ pthread_cond_signal(&cond);
+ pthread_mutex_unlock(&lock);
+ return 0;
+}
+
+OMX_ERRORTYPE ftbdone(OMX_OUT OMX_HANDLETYPE hComponent,
+ OMX_OUT OMX_PTR pAppData,
+ OMX_OUT OMX_BUFFERHEADERTYPE* pBuffer)
+{
+ ALOGV("%s", __func__);
+ *out_buffer_size = pBuffer->nFilledLen;
+ pthread_mutex_lock(&lock);
+ expectedEvent = OMX_EVENT_FTB_DONE;
+ expectedValue1 = 0;
+ expectedValue2 = 0;
+ pthread_cond_signal(&cond);
+ pthread_mutex_unlock(&lock);
+ ALOGV("%s:filled len = %u", __func__, (uint32_t)pBuffer->nFilledLen);
+ if (mmcamera_jpeg_callback && encoding)
+ mmcamera_jpeg_callback(0, user_data);
+ return 0;
+}
+
+OMX_ERRORTYPE handleError(OMX_IN OMX_EVENTTYPE eEvent, OMX_IN OMX_U32 error)
+{
+ ALOGV("%s", __func__);
+ if (error == OMX_EVENT_JPEG_ERROR) {
+ if (mmcamera_jpeg_callback && encoding) {
+ ALOGV("%s:OMX_EVENT_JPEG_ERROR\n", __func__);
+ mmcamera_jpeg_callback(JPEG_EVENT_ERROR, user_data);
+ }
+ } else if (error == OMX_EVENT_THUMBNAIL_DROPPED) {
+ if (mmcamera_jpeg_callback && encoding) {
+ ALOGV("%s:(OMX_EVENT_THUMBNAIL_DROPPED\n", __func__);
+ mmcamera_jpeg_callback(JPEG_EVENT_THUMBNAIL_DROPPED, user_data);
+ }
+ }
+ return 0;
+}
+
+OMX_ERRORTYPE eventHandler( OMX_IN OMX_HANDLETYPE hComponent,
+ OMX_IN OMX_PTR pAppData, OMX_IN OMX_EVENTTYPE eEvent,
+ OMX_IN OMX_U32 nData1, OMX_IN OMX_U32 nData2,
+ OMX_IN OMX_PTR pEventData)
+{
+ ALOGV("%s", __func__);
+ ALOGV("%s:got event %d ndata1 %u ndata2 %u", __func__,
+ eEvent, nData1, nData2);
+ pthread_mutex_lock(&lock);
+ expectedEvent = eEvent;
+ expectedValue1 = nData1;
+ expectedValue2 = nData2;
+ pthread_cond_signal(&cond);
+ pthread_mutex_unlock(&lock);
+ if ((nData1== OMX_EVENT_JPEG_ERROR)||(nData1== OMX_EVENT_THUMBNAIL_DROPPED))
+ handleError(eEvent,nData1);
+ return 0;
+}
+
+void waitForEvent(int event, int value1, int value2 ){
+ pthread_mutex_lock(&lock);
+ ALOGV("%s:Waiting for:event=%d, value1=%d, value2=%d",
+ __func__, event, value1, value2);
+ while (! (expectedEvent == event &&
+ expectedValue1 == value1 && expectedValue2 == value2)) {
+ pthread_cond_wait(&cond, &lock);
+ ALOGV("%s:After cond_wait:expectedEvent=%d, expectedValue1=%d, expectedValue2=%d",
+ __func__, expectedEvent, expectedValue1, expectedValue2);
+ ALOGV("%s:After cond_wait:event=%d, value1=%d, value2=%d",
+ __func__, event, value1, value2);
+ }
+ ALOGV("%s:done:expectedEvent=%d, expectedValue1=%d, expectedValue2=%d",
+ __func__, expectedEvent, expectedValue1, expectedValue2);
+ pthread_mutex_unlock(&lock);
+}
+
+int8_t mm_jpeg_encoder_get_buffer_offset(uint32_t width, uint32_t height,
+ uint32_t* p_y_offset, uint32_t* p_cbcr_offset, uint32_t* p_buf_size,
+ uint8_t *num_planes, uint32_t planes[])
+{
+ ALOGV("%s:", __func__);
+ if ((NULL == p_y_offset) || (NULL == p_cbcr_offset)) {
+ return false;
+ }
+ *num_planes = 2;
+ if (hw_encode ) {
+ int cbcr_offset = 0;
+ uint32_t actual_size = width*height;
+ uint32_t padded_size = width * CEILING16(height);
+ *p_y_offset = 0;
+ *p_cbcr_offset = 0;
+ //if(!isZSLMode){
+ if ((jpegRotation == 90) || (jpegRotation == 180)) {
+ *p_y_offset = padded_size - actual_size;
+ *p_cbcr_offset = ((padded_size - actual_size) >> 1);
+ }
+ *p_buf_size = (padded_size + (padded_size - actual_size)) * 3/2;
+ planes[0] = width * CEILING16(height);
+ planes[1] = width * CEILING16(height)/2;
+ } else {
+ *p_y_offset = 0;
+ *p_cbcr_offset = PAD_TO_WORD(width*height);
+ *p_buf_size = *p_cbcr_offset * 3/2;
+ planes[0] = PAD_TO_WORD(width*CEILING16(height));
+ planes[1] = PAD_TO_WORD(width*CEILING16(height)/2);
+ }
+ return true;
+}
+
+int8_t omxJpegOpen()
+{
+ ALOGV("%s", __func__);
+ pthread_mutex_lock(&jpege_mutex);
+ libmmstillomx = dlopen("libmmstillomx.so", RTLD_NOW);
+ if (!libmmstillomx) {
+ ALOGE("%s: dlopen failed", __func__);
+ pthread_mutex_unlock(&jpege_mutex);
+ return false;
+ }
+ *(void **)(&pOMX_GetHandle) = dlsym(libmmstillomx, "OMX_GetHandle");
+ *(void **)(&pOMX_Init) = dlsym(libmmstillomx, "OMX_Init");
+ *(void **)(&pOMX_Deinit) = dlsym(libmmstillomx, "OMX_Deinit");
+ if (!pOMX_GetHandle || !pOMX_Init || !pOMX_Deinit) {
+ ALOGE("%s: dlsym failed", __func__);
+ dlclose(libmmstillomx);
+ pthread_mutex_unlock(&jpege_mutex);
+ return false;
+ }
+ OMX_ERRORTYPE ret = (*pOMX_GetHandle)(&pHandle, "OMX.qcom.image.jpeg.encoder",
+ NULL, &callbacks);
+ pthread_mutex_unlock(&jpege_mutex);
+ return true;
+}
+
+int8_t omxJpegStart(uint8_t hw_encode_enable)
+{
+ int rc = 0;
+ ALOGV("%s", __func__);
+ pthread_mutex_lock(&jpege_mutex);
+ hw_encode = hw_encode_enable;
+ callbacks.EmptyBufferDone = etbdone;
+ callbacks.FillBufferDone = ftbdone;
+ callbacks.EventHandler = eventHandler;
+ pthread_mutex_init(&lock, NULL);
+ pthread_cond_init(&cond, NULL);
+ rc = (*pOMX_Init)();
+ pthread_mutex_unlock(&jpege_mutex);
+ return rc;
+}
+
+static omx_jpeg_color_format format_cam2jpeg(cam_format_t fmt)
+{
+ omx_jpeg_color_format jpeg_fmt = OMX_YCRCBLP_H2V2;
+ switch (fmt) {
+ case CAMERA_YUV_420_NV12:
+ jpeg_fmt = OMX_YCBCRLP_H2V2;
+ break;
+ case CAMERA_YUV_420_NV21:
+ case CAMERA_YUV_420_NV21_ADRENO:
+ jpeg_fmt = OMX_YCRCBLP_H2V2;
+ break;
+ default:
+ ALOGV("Camera format %d not supported", fmt);
+ break;
+ }
+ return jpeg_fmt;
+}
+
/* Kick off the next encode in a burst: reuses the port configuration set up
 * by omxJpegEncode(), re-registers the snapshot/thumbnail/output buffers,
 * and submits them to the component. Returns -1 if the ports were never
 * configured, true otherwise (individual OMX call results are not checked). */
int8_t omxJpegEncodeNext(omx_jpeg_encode_params *encode_params)
{
    ALOGV("%s:E", __func__);
    pthread_mutex_lock(&jpege_mutex);
    encoding = 1;
    int orientation;
    /* Port definitions are allocated by omxJpegEncode(); bail out if a burst
     * "next" is requested before the first encode configured them. */
    if(inputPort == NULL || inputPort1 == NULL || outputPort == NULL) {
        ALOGV("%s:pointer is null: X", __func__);
        pthread_mutex_unlock(&jpege_mutex);
        return -1;
    }
    inputPort->nPortIndex = INPUT_PORT;
    outputPort->nPortIndex = OUTPUT_PORT;
    inputPort1->nPortIndex = INPUT_PORT1;
    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort);
    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, outputPort);

    ALOGV("%s:nFrameWidth=%d nFrameHeight=%d nBufferSize=%d w=%d h=%d",
        __func__, inputPort->format.image.nFrameWidth,
        inputPort->format.image.nFrameHeight, inputPort->nBufferSize,
        bufferoffset.width, bufferoffset.height);
    /* Re-apply the buffer offset extension computed during the first encode. */
    OMX_GetExtensionIndex(pHandle,"omx.qcom.jpeg.exttype.buffer_offset",
        &buffer_offset);
    ALOGV("%s:Buffer w %d h %d yOffset %d cbcrOffset %d totalSize %d\n",
        __func__, bufferoffset.width, bufferoffset.height, bufferoffset.yOffset,
        bufferoffset.cbcrOffset,bufferoffset.totalSize);
    OMX_SetParameter(pHandle, buffer_offset, &bufferoffset);
    OMX_SetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort1);
    OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort1);
    ALOGV("%s: thumbnail widht %d height %d", __func__,
        thumbnail.width, thumbnail.height);

    userpreferences.color_format =
        format_cam2jpeg(encode_params->dimension->main_img_format);
    userpreferences.thumbnail_color_format =
        format_cam2jpeg(encode_params->dimension->thumb_format);


    pmem_info.fd = encode_params->snapshot_fd;
    pmem_info.offset = 0;

    //Release previously allocated buffers before doing UseBuffer in burst mode
    OMX_FreeBuffer(pHandle, 2, pInBuffers1);
    OMX_FreeBuffer(pHandle, 0, pInBuffers);
    OMX_FreeBuffer(pHandle, 1, pOutBuffers);

    OMX_UseBuffer(pHandle, &pInBuffers, 0, &pmem_info, inputPort->nBufferSize,
        (void *) encode_params->snapshot_buf);
    OMX_GetExtensionIndex(pHandle, "omx.qcom.jpeg.exttype.exif", &exif);
    /* In ZSL mode the rotation is known up front, so instead of rotating the
     * pixels the orientation is recorded in the EXIF tag; the decoder then
     * displays the image with the right orientation. This avoids the image
     * corruption seen when rotating in ZSL mode. */
    if (isZSLMode) {
        /* Map the requested rotation to the EXIF orientation code. */
        switch (jpegRotation) {
        case 0:
            orientation = 1; /*Normal*/
            break;
        case 90:
            orientation = 6; /*Rotated 90 CCW*/
            break;
        case 180:
            orientation = 3; /*Rotated 180*/
            break;
        case 270:
            orientation = 8; /*Rotated 90 CW*/
            break;
        default:
            orientation = 1;
            break;
        }
        tag.tag_id = EXIFTAGID_ORIENTATION;
        tag.tag_entry.type = EXIFTAGTYPE_ORIENTATION;
        tag.tag_entry.count = 1;
        tag.tag_entry.copy = 1;
        tag.tag_entry.data._short = orientation;
        ALOGV("%s jpegRotation = %d , orientation value =%d\n", __func__,
            jpegRotation, orientation);
        OMX_SetParameter(pHandle, exif, &tag);
    }

    /* Push every caller-supplied EXIF tag into the component. */
    int i;
    for (i = 0; i < encode_params->exif_numEntries; i++) {
        memcpy(&tag, encode_params->exif_data + i,
            sizeof(omx_jpeg_exif_info_tag));
        OMX_SetParameter(pHandle, exif, &tag);
    }

    pmem_info1.fd = encode_params->thumbnail_fd;
    pmem_info1.offset = 0;

    ALOGV("%s: input1 buff size %d", __func__, inputPort1->nBufferSize);
    /* Register thumbnail input (port 2) and the caller's output buffer
     * (port 1), then submit all three buffers to start the encode. */
    OMX_UseBuffer(pHandle, &pInBuffers1, 2, &pmem_info1,
        inputPort1->nBufferSize, (void *) encode_params->thumbnail_buf);
    OMX_UseBuffer(pHandle, &pOutBuffers, 1, NULL, inputPort->nBufferSize,
        (void *) out_buffer);
    OMX_EmptyThisBuffer(pHandle, pInBuffers);
    OMX_EmptyThisBuffer(pHandle, pInBuffers1);
    OMX_FillThisBuffer(pHandle, pOutBuffers);
    pthread_mutex_unlock(&jpege_mutex);
    ALOGV("%s:X", __func__);
    return true;
}
+
+int8_t omxJpegEncode(omx_jpeg_encode_params *encode_params)
+{
+ int size = 0;
+ uint8_t num_planes;
+ uint32_t planes[10];
+ int orientation;
+ ALOGV("%s:E", __func__);
+
+ inputPort = malloc(sizeof(OMX_PARAM_PORTDEFINITIONTYPE));
+ outputPort = malloc(sizeof(OMX_PARAM_PORTDEFINITIONTYPE));
+ inputPort1 = malloc(sizeof(OMX_PARAM_PORTDEFINITIONTYPE));
+
+ pthread_mutex_lock(&jpege_mutex);
+ encoding = 1;
+ inputPort->nPortIndex = INPUT_PORT;
+ outputPort->nPortIndex = OUTPUT_PORT;
+ inputPort1->nPortIndex = INPUT_PORT1;
+ OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort);
+ OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, outputPort);
+
+ bufferoffset.width = encode_params->dimension->orig_picture_dx;
+ bufferoffset.height = encode_params->dimension->orig_picture_dy;
+
+ if (hw_encode)
+ userpreferences.preference = OMX_JPEG_PREF_HW_ACCELERATED_PREFERRED;
+ else
+ userpreferences.preference = OMX_JPEG_PREF_SOFTWARE_ONLY;
+ if (encode_params->a_cbcroffset > 0) {
+ userpreferences.preference = OMX_JPEG_PREF_SOFTWARE_ONLY;
+ hw_encode = 0;
+ }
+ if (encode_params->scaling_params->in2_w &&
+ encode_params->scaling_params->in2_h) {
+ if (jpegRotation) {
+ userpreferences.preference = OMX_JPEG_PREF_SOFTWARE_ONLY;
+ ALOGV("%s:Scaling and roation true: setting pref to sw\n",
+ __func__);
+ hw_encode = 0;
+ }
+ }
+ mm_jpeg_encoder_get_buffer_offset(bufferoffset.width, bufferoffset.height,
+ &bufferoffset.yOffset,
+ &bufferoffset.cbcrOffset,
+ &bufferoffset.totalSize, &num_planes, planes);
+ if (encode_params->a_cbcroffset > 0) {
+ bufferoffset.totalSize = encode_params->a_cbcroffset * 1.5;
+ }
+ OMX_GetExtensionIndex(pHandle,"omx.qcom.jpeg.exttype.buffer_offset",&buffer_offset);
+ ALOGV(" Buffer width = %d, Buffer height = %d, yOffset =%d, cbcrOffset =%d, totalSize = %d\n",
+ bufferoffset.width, bufferoffset.height, bufferoffset.yOffset,
+ bufferoffset.cbcrOffset,bufferoffset.totalSize);
+ OMX_SetParameter(pHandle, buffer_offset, &bufferoffset);
+
+
+ if (encode_params->a_cbcroffset > 0) {
+ ALOGV("Using acbcroffset\n");
+ bufferoffset1.cbcrOffset = encode_params->a_cbcroffset;
+ OMX_GetExtensionIndex(pHandle,"omx.qcom.jpeg.exttype.acbcr_offset",&buffer_offset);
+ OMX_SetParameter(pHandle, buffer_offset, &bufferoffset1);
+ }
+
+ inputPort->format.image.nFrameWidth = encode_params->dimension->orig_picture_dx;
+ inputPort->format.image.nFrameHeight = encode_params->dimension->orig_picture_dy;
+ inputPort->format.image.nStride = encode_params->dimension->orig_picture_dx;
+ inputPort->format.image.nSliceHeight = encode_params->dimension->orig_picture_dy;
+ inputPort->nBufferSize = bufferoffset.totalSize;
+
+ inputPort1->format.image.nFrameWidth =
+ encode_params->dimension->thumbnail_width;
+ inputPort1->format.image.nFrameHeight =
+ encode_params->dimension->thumbnail_height;
+ inputPort1->format.image.nStride =
+ encode_params->dimension->thumbnail_width;
+ inputPort1->format.image.nSliceHeight =
+ encode_params->dimension->thumbnail_height;
+
+ OMX_SetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort);
+ OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort);
+ size = inputPort->nBufferSize;
+ thumbnail.width = encode_params->dimension->thumbnail_width;
+ thumbnail.height = encode_params->dimension->thumbnail_height;
+
+ OMX_SetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort1);
+ OMX_GetParameter(pHandle, OMX_IndexParamPortDefinition, inputPort1);
+ ALOGV("%s: thumbnail width %d height %d", __func__,
+ encode_params->dimension->thumbnail_width,
+ encode_params->dimension->thumbnail_height);
+
+ if(encode_params->a_cbcroffset > 0)
+ inputPort1->nBufferSize = inputPort->nBufferSize;
+
+ userpreferences.color_format =
+ get_jpeg_format_from_cam_format(encode_params->main_format);
+ userpreferences.thumbnail_color_format =
+ get_jpeg_format_from_cam_format(encode_params->thumbnail_format);
+
+
+
+
+ ALOGV("%s:Scaling params in1_w %d in1_h %d out1_w %d out1_h %d"
+ "main_img in2_w %d in2_h %d out2_w %d out2_h %d\n", __func__,
+ encode_params->scaling_params->in1_w,
+ encode_params->scaling_params->in1_h,
+ encode_params->scaling_params->out1_w,
+ encode_params->scaling_params->out1_h,
+ encode_params->scaling_params->in2_w,
+ encode_params->scaling_params->in2_h,
+ encode_params->scaling_params->out2_w,
+ encode_params->scaling_params->out2_h);
+ /*Main image scaling*/
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+
+ if (encode_params->scaling_params->in2_w &&
+ encode_params->scaling_params->in2_h) {
+
+ /* Scaler information for main image */
+ recttype.nWidth = CEILING2(encode_params->scaling_params->in2_w);
+ recttype.nHeight = CEILING2(encode_params->scaling_params->in2_h);
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+ if (encode_params->main_crop_offset) {
+ recttype.nLeft = encode_params->main_crop_offset->x;
+ recttype.nTop = encode_params->main_crop_offset->y;
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+ } else {
+ recttype.nLeft = 0;
+ recttype.nTop = 0;
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+ }
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+ recttype.nPortIndex = 1;
+ OMX_SetConfig(pHandle, OMX_IndexConfigCommonInputCrop, &recttype);
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+ if (encode_params->scaling_params->out2_w &&
+ encode_params->scaling_params->out2_h) {
+ recttype.nWidth = (encode_params->scaling_params->out2_w);
+ recttype.nHeight = (encode_params->scaling_params->out2_h);
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+
+ recttype.nPortIndex = 1;
+ OMX_SetConfig(pHandle, OMX_IndexConfigCommonOutputCrop, &recttype);
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+ }
+
+ } else {
+ ALOGV("%s: There is no main image scaling information",
+ __func__);
+ }
+ /*case of thumbnail*/
+
+ if ((encode_params->scaling_params->in1_w &&
+ encode_params->scaling_params->in1_h) ||
+ ((encode_params->scaling_params->out1_w !=
+ encode_params->dimension->thumbnail_width) &&
+ (encode_params->scaling_params->out1_h !=
+ encode_params->dimension->thumbnail_height))) {
+
+ thumbnail.scaling = 0;
+
+ if ((encode_params->scaling_params->out1_w !=
+ encode_params->dimension->thumbnail_width)&&
+ (encode_params->scaling_params->out1_h !=
+ encode_params->dimension->thumbnail_height)) {
+
+ ALOGV("%s:%d/n",__func__,__LINE__);
+ thumbnail.cropWidth = CEILING2(encode_params->dimension->thumbnail_width);
+ thumbnail.cropHeight = CEILING2(encode_params->dimension->thumbnail_height);
+ }
+ if (encode_params->scaling_params->in1_w &&
+ encode_params->scaling_params->in1_h) {
+ ALOGV("%s:%d/n",__func__,__LINE__);
+ thumbnail.cropWidth = CEILING2(encode_params->scaling_params->in1_w);
+ thumbnail.cropHeight = CEILING2(encode_params->scaling_params->in1_h);
+ }
+ thumbnail.width = encode_params->scaling_params->out1_w;
+ thumbnail.height = encode_params->scaling_params->out1_h;
+
+ if (encode_params->thumb_crop_offset) {
+ ALOGV("%s:%d/n",__func__,__LINE__);
+
+ thumbnail.left = encode_params->thumb_crop_offset->x;
+ thumbnail.top = encode_params->thumb_crop_offset->y;
+ thumbnail.scaling = 1;
+ } else {
+ thumbnail.left = 0;
+ thumbnail.top = 0;
+ }
+ } else {
+ thumbnail.scaling = 0;
+ ALOGV("%s: There is no thumbnail scaling information",
+ __func__);
+ }
+ OMX_GetExtensionIndex(pHandle,"omx.qcom.jpeg.exttype.user_preferences",
+ &user_preferences);
+ ALOGV("%s:User Preferences: color_format %d"
+ "thumbnail_color_format = %d encoder preference =%d\n", __func__,
+ userpreferences.color_format,userpreferences.thumbnail_color_format,
+ userpreferences.preference);
+ OMX_SetParameter(pHandle,user_preferences,&userpreferences);
+
+ ALOGV("%s Thumbnail present? : %d ", __func__,
+ encode_params->hasThumbnail);
+ if (encode_params->hasThumbnail) {
+ OMX_GetExtensionIndex(pHandle, "omx.qcom.jpeg.exttype.thumbnail", &type);
+ OMX_SetParameter(pHandle, type, &thumbnail);
+ }
+ qFactor.nPortIndex = INPUT_PORT;
+ OMX_GetParameter(pHandle, OMX_IndexParamQFactor, &qFactor);
+ qFactor.nQFactor = jpegMainimageQuality;
+ OMX_SetParameter(pHandle, OMX_IndexParamQFactor, &qFactor);
+
+ OMX_GetExtensionIndex(pHandle, "omx.qcom.jpeg.exttype.thumbnail_quality",
+ &thumbnailQualityType);
+
+ ALOGV("%s: thumbnail quality %u %d",
+ __func__, thumbnailQualityType, jpegThumbnailQuality);
+ OMX_GetParameter(pHandle, thumbnailQualityType, &thumbnailQuality);
+ thumbnailQuality.nQFactor = jpegThumbnailQuality;
+ OMX_SetParameter(pHandle, thumbnailQualityType, &thumbnailQuality);
+ rotType.nPortIndex = OUTPUT_PORT;
+ rotType.nRotation = jpegRotation;
+ OMX_SetConfig(pHandle, OMX_IndexConfigCommonRotate, &rotType);
+ ALOGV("Set rotation to %d\n",jpegRotation);
+ OMX_GetExtensionIndex(pHandle, "omx.qcom.jpeg.exttype.exif", &exif);
+
+ //Set omx parameter for all exif tags
+ int i;
+ for(i=0; i<encode_params->exif_numEntries; i++) {
+ memcpy(&tag, encode_params->exif_data + i, sizeof(omx_jpeg_exif_info_tag));
+ OMX_SetParameter(pHandle, exif, &tag);
+ }
+
+ pmem_info.fd = encode_params->snapshot_fd;
+ pmem_info.offset = 0;
+
+ ALOGV("input buffer size is %d",size);
+ OMX_UseBuffer(pHandle, &pInBuffers, 0, &pmem_info, size,
+ (void *) encode_params->snapshot_buf);
+
+ pmem_info1.fd = encode_params->thumbnail_fd;
+ pmem_info1.offset = 0;
+
+ ALOGV("%s: input1 buff size %d", __func__, inputPort1->nBufferSize);
+ OMX_UseBuffer(pHandle, &pInBuffers1, 2, &pmem_info1,
+ inputPort1->nBufferSize, (void *) encode_params->thumbnail_buf);
+
+
+ OMX_UseBuffer(pHandle, &pOutBuffers, 1, NULL, size, (void *) out_buffer);
+
+ waitForEvent(OMX_EventCmdComplete, OMX_CommandStateSet, OMX_StateIdle);
+ ALOGV("%s:State changed to OMX_StateIdle\n", __func__);
+ OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateExecuting, NULL);
+ waitForEvent(OMX_EventCmdComplete, OMX_CommandStateSet, OMX_StateExecuting);
+
+ OMX_EmptyThisBuffer(pHandle, pInBuffers);
+ OMX_EmptyThisBuffer(pHandle, pInBuffers1);
+ OMX_FillThisBuffer(pHandle, pOutBuffers);
+ pthread_mutex_unlock(&jpege_mutex);
+ ALOGV("%s:X", __func__);
+ return true;
+}
+
+void omxJpegFinish()
+{
+ pthread_mutex_lock(&jpege_mutex);
+ ALOGV("%s:encoding=%d", __func__, encoding);
+ if (encoding) {
+ encoding = 0;
+ OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateIdle, NULL);
+ OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateLoaded, NULL);
+ OMX_FreeBuffer(pHandle, 0, pInBuffers);
+ OMX_FreeBuffer(pHandle, 2, pInBuffers1);
+ OMX_FreeBuffer(pHandle, 1, pOutBuffers);
+ (*pOMX_Deinit)();
+ }
+ pthread_mutex_unlock(&jpege_mutex);
+}
+
+void omxJpegClose()
+{
+ ALOGV("%s:", __func__);
+ dlclose(libmmstillomx);
+}
+
+void omxJpegAbort()
+{
+ pthread_mutex_lock(&jpegcb_mutex);
+ mmcamera_jpegfragment_callback = NULL;
+ mmcamera_jpeg_callback = NULL;
+ user_data = NULL;
+ pthread_mutex_unlock(&jpegcb_mutex);
+ pthread_mutex_lock(&jpege_mutex);
+ ALOGV("%s: encoding=%d", __func__, encoding);
+ if (encoding) {
+ encoding = 0;
+ OMX_SendCommand(pHandle, OMX_CommandFlush, NULL, NULL);
+ ALOGV("%s:waitForEvent: OMX_CommandFlush", __func__);
+ waitForEvent(OMX_EVENT_JPEG_ABORT, 0, 0);
+ ALOGV("%s:waitForEvent: OMX_CommandFlush: DONE", __func__);
+ OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateIdle, NULL);
+ OMX_SendCommand(pHandle, OMX_CommandStateSet, OMX_StateLoaded, NULL);
+ OMX_FreeBuffer(pHandle, 0, pInBuffers);
+ OMX_FreeBuffer(pHandle, 2, pInBuffers1);
+ OMX_FreeBuffer(pHandle, 1, pOutBuffers);
+ (*pOMX_Deinit)();
+ }
+ pthread_mutex_unlock(&jpege_mutex);
+}
+
+
+int8_t mm_jpeg_encoder_setMainImageQuality(uint32_t quality)
+{
+ pthread_mutex_lock(&jpege_mutex);
+ ALOGV("%s: current main inage quality %d ," \
+ " new quality : %d\n", __func__, jpegMainimageQuality, quality);
+ if (quality <= 100)
+ jpegMainimageQuality = quality;
+ pthread_mutex_unlock(&jpege_mutex);
+ return true;
+}
+
+int8_t mm_jpeg_encoder_setThumbnailQuality(uint32_t quality)
+{
+ pthread_mutex_lock(&jpege_mutex);
+ ALOGV("%s: current thumbnail quality %d ," \
+ " new quality : %d\n", __func__, jpegThumbnailQuality, quality);
+ if (quality <= 100)
+ jpegThumbnailQuality = quality;
+ pthread_mutex_unlock(&jpege_mutex);
+ return true;
+}
+
+int8_t mm_jpeg_encoder_setRotation(int rotation, int isZSL)
+{
+ pthread_mutex_lock(&jpege_mutex);
+
+ /*Set ZSL Mode*/
+ isZSLMode = isZSL;
+ ALOGV("%s: Setting ZSL Mode to %d Rotation = %d\n",__func__,isZSLMode,rotation);
+ /* Set rotation configuration */
+ switch (rotation) {
+ case 0:
+ case 90:
+ case 180:
+ case 270:
+ jpegRotation = rotation;
+ break;
+ default:
+ /* Invalid rotation mode, set to default */
+ ALOGV("%s:Setting Default rotation mode", __func__);
+ jpegRotation = 0;
+ break;
+ }
+ pthread_mutex_unlock(&jpege_mutex);
+ return true;
+}
+
+/*===========================================================================
+FUNCTION mm_jpege_set_phy_offset
+
+DESCRIPTION Set physical offset for the buffer
+===========================================================================*/
+void mm_jpege_set_phy_offset(uint32_t a_phy_offset)
+{
+ phy_offset = a_phy_offset;
+}
diff --git a/camera/mm-camera-interface/mm_omx_jpeg_encoder.h b/camera/mm-camera-interface/mm_omx_jpeg_encoder.h
new file mode 100644
index 0000000..055d174
--- /dev/null
+++ b/camera/mm-camera-interface/mm_omx_jpeg_encoder.h
@@ -0,0 +1,82 @@
+/* Copyright (c) 2011, The Linux Foundation. All rights reserved.
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions are
+ * met:
+ * * Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * * Redistributions in binary form must reproduce the above
+ * copyright notice, this list of conditions and the following
+ * disclaimer in the documentation and/or other materials provided
+ * with the distribution.
+ * * Neither the name of The Linux Foundation nor the names of its
+ * contributors may be used to endorse or promote products derived
+ * from this software without specific prior written permission.
+ *
+ * THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
+ * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+ * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
+ * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+ * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+ * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
+ * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
+ * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
+ * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
+ * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ */
+
+#ifndef MM_OMX_JPEG_ENCODER_H_
+#define MM_OMX_JPEG_ENCODER_H_
+#include <linux/msm_ion.h>
+#include "QCamera_Intf.h"
+
+typedef struct omx_jpeg_encode_params_t {
+ const cam_ctrl_dimension_t * dimension;
+ const uint8_t * thumbnail_buf;
+ int thumbnail_fd;
+ uint32_t thumbnail_offset;
+ const uint8_t * snapshot_buf;
+ int snapshot_fd;
+ uint32_t snapshot_offset;
+ common_crop_t *scaling_params;
+ exif_tags_info_t *exif_data;
+ int exif_numEntries;
+ int32_t a_cbcroffset;
+ cam_point_t* main_crop_offset;
+ cam_point_t* thumb_crop_offset;
+ int hasThumbnail;
+ cam_format_t main_format;
+ cam_format_t thumbnail_format;
+}omx_jpeg_encode_params;
+
+int8_t omxJpegOpen();
+int8_t omxJpegStart(uint8_t hw_encode_enable);
+int8_t omxJpegEncode(omx_jpeg_encode_params *encode_params);
+int8_t omxJpegEncodeNext(omx_jpeg_encode_params *encode_params);
+void omxJpegFinish();
+void omxJpegClose();
+void omxJpegAbort();
+
+int8_t mm_jpeg_encoder_setMainImageQuality(uint32_t quality);
+int8_t mm_jpeg_encoder_setThumbnailQuality(uint32_t quality);
+int8_t mm_jpeg_encoder_setRotation(int rotation,int isZSL);
+void jpege_set_phy_offset(uint32_t a_phy_offset);
+int8_t mm_jpeg_encoder_get_buffer_offset(uint32_t width, uint32_t height,
+ uint32_t* p_y_offset, uint32_t* p_cbcr_offset,
+ uint32_t* p_buf_size,uint8_t *num_planes,
+ uint32_t planes[]);
+typedef void (*jpegfragment_callback_t)(uint8_t * buff_ptr,
+ uint32_t buff_size, void* user_data);
+typedef void (*jpeg_callback_t)(jpeg_event_t, void *);
+
+void set_callbacks(
+ jpegfragment_callback_t fragcallback,
+ jpeg_callback_t eventcallback,
+ void* userdata,
+ void* output_buffer,
+ int * outBufferSize
+);
+
+
+#endif /* MM_OMX_JPEG_ENCODER_H_ */
diff --git a/init.mako.rc b/init.mako.rc
index eab8e87..d3aa100 100644
--- a/init.mako.rc
+++ b/init.mako.rc
@@ -49,6 +49,7 @@
on fs
mount_all ./fstab.mako
setprop ro.crypto.fuse_sdcard true
+ write /sys/kernel/boot_adsp/boot 1
on early-boot
# set RLIMIT_MEMLOCK to 64MB
diff --git a/nfc/libnfc-brcm.conf b/nfc/libnfc-brcm.conf
index 8ededf5..d52f6a8 100644
--- a/nfc/libnfc-brcm.conf
+++ b/nfc/libnfc-brcm.conf
@@ -97,7 +97,7 @@
# {20:C8:1E:06:??:00:??:??:??:00:??:24:00:1C:00:75:00:77:00:76:00:1C:00:03:00:0A:00:??:01:00:00:40:04}
# array[0] = 0x20 is length of the payload from array[1] to the end
# array[1] = 0xC8 is PREINIT_DSP_CFG
-PREINIT_DSP_CFG={06:1F:00:0A:03:30:00:04:24:00:1C:00:75:00:77:00:76:00:1C:00:03:00:0A:00:4C:01:00:00:40:04}
+PREINIT_DSP_CFG={20:C8:1E:06:1F:00:0A:03:30:00:04:24:00:1C:00:75:00:77:00:76:00:1C:00:03:00:0A:00:4C:01:00:00:40:04}
###############################################################################
# Configure crystal frequency when internal LPO can't detect the frequency.
diff --git a/proprietary-blobs.txt b/proprietary-blobs.txt
index 0a5ce4f..4c5a174 100644
--- a/proprietary-blobs.txt
+++ b/proprietary-blobs.txt
@@ -39,7 +39,6 @@
/system/bin/usbhub
/system/bin/usbhub_init
/system/bin/v4l2-qcamera-app
-/system/etc/diag.cfg
/system/etc/DxHDCP.cfg
/system/etc/firmware/vidc.b00
/system/etc/firmware/vidc.b01